diff --git a/.codespellrc b/.codespellrc index 1a7bf949281..5f3a5847be4 100644 --- a/.codespellrc +++ b/.codespellrc @@ -1,6 +1,6 @@ [codespell] # Ref: https://github.com/codespell-project/codespell#using-a-config-file -skip = *.svg,*.sublime-workspace,*.lock,.codespellrc,./util/target/,./binaryData,./node_modules,./pg,./project/target,./target,./webknossos-datastore/target,./webknossos-jni/target,./webknossos-tracingstore/target,./util/target,./coverage,./public-test,./tools/proxy/node_modules,./docs/publications.md,./public/bundle +skip = *.svg,*.sublime-workspace,*.lock,.codespellrc,./util/target/,./binaryData,./node_modules,./pg,./project/target,./target,./webknossos-datastore/target,./webknossos-jni/target,./webknossos-tracingstore/target,./util/target,./coverage,./public-test,./tools/proxy/node_modules,./docs/publications.md,./public/bundle,./tools/migration-unified-annotation-versioning/venv # some names and camelCased variables etc ignore-regex = \b([a-z]+[A-Z][a-zA-Z]*|H Mattern|Manuel|Nat Commun)\b ignore-words-list = lod,nd,ue diff --git a/.editorconfig b/.editorconfig index 426b13cd4a2..4566e12eedd 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,6 +9,9 @@ charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true +[*.py] +indent_size = 4 + [*.md] trim_trailing_whitespace = false diff --git a/.gitignore b/.gitignore index 3d0c1ff67ad..a9d185a95c5 100755 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,7 @@ RUNNING_PID .bloop .metals metals.sbt +__pycache__/ # Webknossos @@ -109,4 +110,4 @@ metals.sbt !.yarn/releases !.yarn/sdks !.yarn/versions -tools/**/.yarn/* \ No newline at end of file +tools/**/.yarn/* diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index e4e02843cd7..d2562910b79 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -13,6 +13,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Added - Added the total volume of a dataset to a tooltip in the dataset info tab. [#8229](https://github.com/scalableminds/webknossos/pull/8229) - Optimized performance of data loading with “fill value“ chunks. [#8271](https://github.com/scalableminds/webknossos/pull/8271) +- When using the “Restore older Version” feature, there are no longer separate tabs for the different annotation layers. Only one linear annotation history is now used, and if you revert to an older version, all layers are reverted. If layers were added/deleted since then, that is also reverted. This also means that proofreading annotations can now be reverted to older versions as well. The description text of annotations is now versioned as well. [#7917](https://github.com/scalableminds/webknossos/pull/7917) ### Changed - Renamed "resolution" to "magnification" in more places within the codebase, including local variables. [#8168](https://github.com/scalableminds/webknossos/pull/8168) @@ -26,6 +27,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Fixed that listing datasets with the `api/datasets` route without compression failed due to missing permissions regarding public datasets. [#8249](https://github.com/scalableminds/webknossos/pull/8249) - A "Locked by anonymous user" banner is no longer shown when opening public editable annotations of other organizations. [#8273](https://github.com/scalableminds/webknossos/pull/8273) - Fixed a bug that uploading a zarr dataset with an already existing `datasource-properties.json` file failed. 
[#8268](https://github.com/scalableminds/webknossos/pull/8268)
+- Fixed that the frontend did not ensure a minimum length for annotation layer names. Moreover, names starting with a `.` are also disallowed now. [#8244](https://github.com/scalableminds/webknossos/pull/8244)
- Fixed the organization switching feature for datasets opened via old links. [#8257](https://github.com/scalableminds/webknossos/pull/8257)
- Fixed that uploading an NML file without an organization id failed. Now the user's organization is used as fallback. [#8277](https://github.com/scalableminds/webknossos/pull/8277)
- Fixed that the frontend did not ensure a minimum length for annotation layer names. Moreover, names starting with a `.` are also disallowed now. [#8244](https://github.com/scalableminds/webknossos/pull/8244)
@@ -38,5 +40,6 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
### Removed
- Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075)
+- Removed the feature to downsample existing volume annotations. All new volume annotations have had a whole mag stack since [#4755](https://github.com/scalableminds/webknossos/pull/4755) (four years ago). [#7917](https://github.com/scalableminds/webknossos/pull/7917)
### Breaking Changes
diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md
index 681f5a88093..fb3a567e35d 100644
--- a/MIGRATIONS.unreleased.md
+++ b/MIGRATIONS.unreleased.md
@@ -8,6 +8,12 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md).
## Unreleased
[Commits](https://github.com/scalableminds/webknossos/compare/24.12.0...HEAD)
- Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075)
+- The migration route `addSegmentIndex` was removed. If you haven’t done this yet but need segment indices for your volume annotations, upgrade to an earlier version first, call `addSegmentIndex`, and then upgrade again. [#7917](https://github.com/scalableminds/webknossos/pull/7917)
+- The versioning scheme of annotations has been changed. This requires a larger migration, including the FossilDB content. [#7917](https://github.com/scalableminds/webknossos/pull/7917)
+  - New FossilDB version `0.1.33` (docker image `scalableminds/fossildb:master__504`) is required.
+  - For the migration, a second FossilDB needs to be started. To do that, either use the docker image, a jar, or check out the [fossilDB repository](https://github.com/scalableminds/fossildb). If you opened your old FossilDB with an options file, it probably makes sense to use the same options file for the new one as well.
+  - FossilDB must now be opened with the new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`.
+  - The FossilDB content needs to be migrated. For that, use the Python program at `tools/migration-unified-annotation-versioning` (see `python main.py --help` for instructions). Note that it writes to a completely new FossilDB, which must first be opened with the new column families (see above). The migration code needs to connect to Postgres, to the old FossilDB, and to the new one. After the migration, replace the old FossilDB with the new one (either change the ports of the existing programs, or exchange the data directories on disk).
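+    A minimal sketch of what the two runs could look like (the flag names and port numbers here are hypothetical illustrations, not the tool's actual interface; `python main.py --help` lists the real options):
+    ```bash
+    cd tools/migration-unified-annotation-versioning
+    # NOTE: flag names below are hypothetical; check `python main.py --help` for the real ones.
+    # First run, while WEBKNOSSOS is still online: reads annotation metadata from
+    # Postgres and copies each annotation from the old FossilDB into the new one,
+    # which must already be running with the new column family set.
+    python main.py --postgres localhost:5432 --src-fossildb localhost:7155 --dst-fossildb localhost:7156
+    # Incremental second run, during the WEBKNOSSOS downtime: only annotations
+    # edited since the first run still need to be migrated.
+    python main.py --postgres localhost:5432 --src-fossildb localhost:7155 --dst-fossildb localhost:7156
+    ```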
The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. ### Postgres Evolutions: - [124-decouple-dataset-directory-from-name](conf/evolutions/124-decouple-dataset-directory-from-name) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index a6162e682dd..63b5ca0883f 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -1,38 +1,31 @@ package controllers -import org.apache.pekko.util.Timeout -import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} -import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType import com.scalableminds.webknossos.datastore.models.annotation.{ AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType } -import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions -import com.scalableminds.webknossos.tracingstore.tracings.{TracingIds, TracingType} +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingType} import mail.{MailchimpClient, MailchimpTag} import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotationEvent} import models.annotation.AnnotationState.Cancelled import models.annotation._ import models.dataset.{DatasetDAO, DatasetService} -import models.organization.OrganizationDAO import models.project.ProjectDAO import models.task.TaskDAO import models.team.{TeamDAO, TeamService} import models.user.time._ import models.user.{User, UserDAO, UserService} -import net.liftweb.common.Box +import org.apache.pekko.util.Timeout import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.Json.WithDefaultValues import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import play.silhouette.api.Silhouette import security.{URLSharing, UserAwareRequestLogging, WkEnv} import telemetry.SlackNotificationService import utils.WkConf @@ -41,26 +34,12 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -case class AnnotationLayerParameters(typ: AnnotationLayerType, - fallbackLayerName: Option[String], - autoFallbackLayer: Boolean = false, - mappingName: Option[String] = None, - magRestrictions: Option[MagRestrictions], - name: Option[String], - additionalAxes: Option[Seq[AdditionalAxis]]) -object AnnotationLayerParameters { - implicit val jsonFormat: OFormat[AnnotationLayerParameters] = - Json.using[WithDefaultValues].format[AnnotationLayerParameters] -} - class AnnotationController @Inject()( annotationDAO: AnnotationDAO, annotationLayerDAO: AnnotationLayerDAO, taskDAO: TaskDAO, userDAO: UserDAO, - organizationDAO: OrganizationDAO, datasetDAO: DatasetDAO, - tracingStoreDAO: TracingStoreDAO, datasetService: DatasetService, annotationService: AnnotationService, 
annotationMutexService: AnnotationMutexService, @@ -76,9 +55,7 @@ class AnnotationController @Inject()( analyticsService: AnalyticsService, slackNotificationService: SlackNotificationService, mailchimpClient: MailchimpClient, - tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore, conf: WkConf, - rpc: RPC, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with UserAwareRequestLogging @@ -92,7 +69,7 @@ class AnnotationController @Inject()( // For Task and Explorational annotations, id is an annotation id. For CompoundTask, id is a task id. For CompoundProject, id is a project id. For CompoundTaskType, id is a task type id id: String, // Timestamp in milliseconds (time at which the request is sent) - timestamp: Long): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { val notFoundMessage = if (request.identity.isEmpty) "annotation.notFound.considerLoggingIn" else "annotation.notFound" @@ -105,10 +82,14 @@ class AnnotationController @Inject()( js <- annotationService .publicWrites(annotation, request.identity, Some(restrictions)) ?~> "annotation.write.failed" _ <- Fox.runOptional(request.identity) { user => - if (typedTyp == AnnotationType.Task || typedTyp == AnnotationType.Explorational) { - timeSpanService - .logUserInteractionIfTheyArePotentialContributor(Instant(timestamp), user, annotation) // log time when a user starts working - } else Fox.successful(()) + Fox.runOptional(timestamp) { timestampDefined => + if (typedTyp == AnnotationType.Task || typedTyp == AnnotationType.Explorational) { + timeSpanService.logUserInteractionIfTheyArePotentialContributor( + Instant(timestampDefined), + user, + annotation) // log time when a user starts working + } else Fox.successful(()) + } } _ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id)) _ = request.identity.foreach { user => @@ -119,11 +100,11 @@ class AnnotationController @Inject()( } def infoWithoutType(id: String, - // Timestamp in milliseconds (time at which the request is sent - timestamp: Long): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + // Timestamp in milliseconds (time at which the request is sent) + timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND + annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND result <- info(annotation.typ.toString, id, timestamp)(request) } yield result @@ -133,9 +114,9 @@ class AnnotationController @Inject()( def merge(typ: String, id: String, mergedTyp: String, mergedId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - annotationA <- provider.provideAnnotation(typ, id, request.identity) ~> NOT_FOUND - annotationB <- provider.provideAnnotation(mergedTyp, mergedId, request.identity) ~> NOT_FOUND - mergedAnnotation <- annotationMerger.mergeTwo(annotationA, annotationB, persistTracing = true, request.identity) ?~> "annotation.merge.failed" + annotationA <- provider.provideAnnotation(typ, id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND + annotationB <- provider.provideAnnotation(mergedTyp, mergedId, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND + mergedAnnotation <- annotationMerger.mergeTwo(annotationA, annotationB, 
request.identity) ?~> "annotation.merge.failed" restrictions = annotationRestrictionDefaults.defaultsFor(mergedAnnotation) _ <- restrictions.allowAccess(request.identity) ?~> Messages("notAllowed") ~> FORBIDDEN _ <- annotationDAO.insertOne(mergedAnnotation) @@ -146,14 +127,14 @@ class AnnotationController @Inject()( def mergeWithoutType(id: String, mergedTyp: String, mergedId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND + annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND result <- merge(annotation.typ.toString, id, mergedTyp, mergedId)(request) } yield result } def reset(typ: String, id: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - annotation <- provider.provideAnnotation(typ, id, request.identity) ~> NOT_FOUND + annotation <- provider.provideAnnotation(typ, id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, annotation._team)) _ <- annotationService.resetToBase(annotation) ?~> "annotation.reset.failed" updated <- provider.provideAnnotation(typ, id, request.identity) @@ -195,54 +176,6 @@ class AnnotationController @Inject()( } yield JsonOk(json, Messages("annotation.isLockedByOwner.success")) } - def addAnnotationLayer(typ: String, id: String): Action[AnnotationLayerParameters] = - sil.SecuredAction.async(validateJson[AnnotationLayerParameters]) { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.addLayer.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - newLayerName = request.body.name.getOrElse(AnnotationLayer.defaultNameForType(request.body.typ)) - _ <- bool2Fox(!annotation.annotationLayers.exists(_.name == newLayerName)) ?~> "annotation.addLayer.nameInUse" - organization <- organizationDAO.findOne(request.identity._organization) - _ <- annotationService.addAnnotationLayer(annotation, organization._id, request.body) - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def addAnnotationLayerWithoutType(id: String): Action[AnnotationLayerParameters] = - sil.SecuredAction.async(validateJson[AnnotationLayerParameters]) { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- addAnnotationLayer(annotation.typ.toString, id)(request) - } yield result - } - - def deleteAnnotationLayer(typ: String, id: String, layerName: String): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.deleteLayer.explorationalsOnly" - annotation <- provider.provideAnnotation(typ, id, request.identity) - _ <- bool2Fox(annotation._user == request.identity._id) ?~> "notAllowed" ~> FORBIDDEN - layer <- annotation.annotationLayers.find(annotationLayer => annotationLayer.name == layerName) ?~> Messages( - "annotation.layer.notFound", - layerName) - _ <- bool2Fox(annotation.annotationLayers.length != 1) ?~> "annotation.deleteLayer.onlyLayer" - _ = logger.info( - 
s"Deleting annotation layer $layerName (tracing id ${layer.tracingId}, typ ${layer.typ}) for annotation $id") - _ <- annotationService.deleteAnnotationLayer(annotation, layerName) - } yield Ok - } - - def deleteAnnotationLayerWithoutType(id: String, layerName: String): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- deleteAnnotationLayer(annotation.typ.toString, id, layerName)(request) - } yield result - } - def createExplorational(datasetId: String): Action[List[AnnotationLayerParameters]] = sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { implicit request => for { @@ -274,7 +207,7 @@ class AnnotationController @Inject()( ObjectId.dummyId, ObjectId.dummyId, List( - AnnotationLayer(TracingIds.dummyTracingId, + AnnotationLayer(TracingId.dummy, AnnotationLayerType.Skeleton, AnnotationLayer.defaultSkeletonLayerName, AnnotationLayerStatistics.unknown)) @@ -283,116 +216,6 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.addLayer.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - organization <- organizationDAO.findOne(request.identity._organization) - _ <- annotationService.makeAnnotationHybrid(annotation, organization._id, fallbackLayerName) ?~> "annotation.makeHybrid.failed" - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def makeHybridWithoutType(id: String, fallbackLayerName: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- makeHybrid(annotation.typ.toString, id, fallbackLayerName)(request) - } yield result - } - - def downsample(typ: String, id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.downsample.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - annotationLayer <- annotation.annotationLayers - .find(_.tracingId == tracingId) - .toFox ?~> "annotation.downsample.layerNotFound" - _ <- annotationService.downsampleAnnotation(annotation, annotationLayer) ?~> "annotation.downsample.failed" - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def downsampleWithoutType(id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- downsample(annotation.typ.toString, id, tracingId)(request) - 
} yield result - } - - def addSegmentIndicesToAll(parallelBatchCount: Int, - dryRun: Boolean, - skipTracings: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - { - for { - _ <- userService.assertIsSuperUser(request.identity._multiUser) ?~> "notAllowed" ~> FORBIDDEN - _ = logger.info("Running migration to add segment index to all volume annotation layers...") - skipTracingsSet = skipTracings.map(_.split(",").toSet).getOrElse(Set()) - _ = if (skipTracingsSet.nonEmpty) { - logger.info(f"Skipping these tracings: ${skipTracingsSet.mkString(",")}") - } - _ = logger.info("Gathering list of volume tracings...") - annotationLayers <- annotationLayerDAO.findAllVolumeLayers - annotationLayersFiltered = annotationLayers.filter(l => !skipTracingsSet.contains(l.tracingId)) - totalCount = annotationLayersFiltered.length - batches = batch(annotationLayersFiltered, parallelBatchCount) - _ = logger.info(f"Processing $totalCount tracings in ${batches.length} batches") - before = Instant.now - results: Seq[List[Box[Unit]]] <- Fox.combined(batches.zipWithIndex.map { - case (batch, index) => addSegmentIndicesToBatch(batch, index, dryRun) - }) - failures = results.flatMap(_.filter(_.isEmpty)) - failureCount: Int = failures.length - successCount: Int = results.map(_.count(_.isDefined)).sum - msg = s"All done (dryRun=$dryRun)! Processed $totalCount tracings in ${batches.length} batches. Took ${Instant - .since(before)}. $failureCount failures, $successCount successes." - _ = if (failures.nonEmpty) { - failures.foreach { failedBox => - logger.info(f"Failed: $failedBox") - } - } - _ = logger.info(msg) - } yield JsonOk(msg) - } - } - - private def addSegmentIndicesToBatch(annotationLayerBatch: List[AnnotationLayer], batchIndex: Int, dryRun: Boolean)( - implicit ec: ExecutionContext) = { - var processedCount = 0 - for { - tracingStore <- tracingStoreDAO.findFirst(GlobalAccessContext) ?~> "tracingStore.notFound" - client = new WKRemoteTracingStoreClient(tracingStore, null, rpc, tracingDataSourceTemporaryStore) - batchCount = annotationLayerBatch.length - results <- Fox.serialSequenceBox(annotationLayerBatch) { annotationLayer => - processedCount += 1 - logger.info( - f"Processing tracing ${annotationLayer.tracingId}. $processedCount of $batchCount in batch $batchIndex (${percent(processedCount, batchCount)})...") - client.addSegmentIndex(annotationLayer.tracingId, dryRun) ?~> s"add segment index failed for ${annotationLayer.tracingId}" - } - _ = logger.info(f"Batch $batchIndex is done. 
Processed ${annotationLayerBatch.length} tracings.") - } yield results - } - - private def batch[T](allItems: List[T], batchCount: Int): List[List[T]] = { - val batchSize: Int = Math.max(Math.min(allItems.length / batchCount, allItems.length), 1) - allItems.grouped(batchSize).toList - } - - private def percent(done: Int, pending: Int) = { - val value = done.toDouble / pending.toDouble * 100 - f"$value%1.1f %%" - } - private def finishAnnotation(typ: String, id: String, issuingUser: User, timestamp: Instant)( implicit ctx: DBAccessContext): Fox[(Annotation, String)] = for { @@ -596,33 +419,25 @@ class AnnotationController @Inject()( datasetService.dataSourceFor(dataset).flatMap(_.toUsable).map(Some(_)) else Fox.successful(None) tracingStoreClient <- tracingStoreService.clientFor(dataset) - newAnnotationLayers <- Fox.serialCombined(annotation.annotationLayers) { annotationLayer => - duplicateAnnotationLayer(annotationLayer, - annotation._task.isDefined, - dataSource.map(_.boundingBox), - tracingStoreClient) - } + newAnnotationId = ObjectId.generate + newAnnotationProto <- tracingStoreClient.duplicateAnnotation( + annotation._id, + newAnnotationId, + version = None, + isFromTask = annotation._task.isDefined, + datasetBoundingBox = dataSource.map(_.boundingBox) + ) + newAnnotationLayers = newAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto) clonedAnnotation <- annotationService.createFrom(user, dataset, newAnnotationLayers, AnnotationType.Explorational, None, - annotation.description) ?~> Messages("annotation.create.failed") + annotation.description, + newAnnotationId) ?~> Messages("annotation.create.failed") + _ <- annotationDAO.insertOne(clonedAnnotation) } yield clonedAnnotation - private def duplicateAnnotationLayer(annotationLayer: AnnotationLayer, - isFromTask: Boolean, - datasetBoundingBox: Option[BoundingBox], - tracingStoreClient: WKRemoteTracingStoreClient): Fox[AnnotationLayer] = - for { - - newTracingId <- if (annotationLayer.typ == AnnotationLayerType.Skeleton) { - tracingStoreClient.duplicateSkeletonTracing(annotationLayer.tracingId, None, isFromTask) ?~> "Failed to duplicate skeleton tracing." - } else { - tracingStoreClient.duplicateVolumeTracing(annotationLayer.tracingId, isFromTask, datasetBoundingBox) ?~> "Failed to duplicate volume tracing." 
- } - } yield annotationLayer.copy(tracingId = newTracingId) - def tryAcquiringAnnotationMutex(id: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => logTime(slackNotificationService.noticeSlowRequest, durationThreshold = 1 second) { diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index a587c9d9547..e09e949d350 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -1,16 +1,11 @@ package controllers import collections.SequenceUtils - -import java.io.{BufferedOutputStream, File, FileOutputStream} -import java.util.zip.Deflater -import org.apache.pekko.actor.ActorSystem -import org.apache.pekko.stream.Materializer -import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.io.ZipIO import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits @@ -20,42 +15,42 @@ import com.scalableminds.webknossos.datastore.models.annotation.{ AnnotationLayerType, FetchedAnnotationLayer } -import com.scalableminds.webknossos.datastore.models.datasource.{ - AbstractSegmentationLayer, - DataLayerLike, - DataSourceLike, - GenericDataSource, - SegmentationLayer -} +import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeDataZipFormat, VolumeTracingDefaults, - VolumeTracingDownsampling + VolumeTracingMags } import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.Empty import javax.inject.Inject +import net.liftweb.common.Empty + import models.analytics.{AnalyticsService, DownloadAnnotationEvent, UploadAnnotationEvent} import models.annotation.AnnotationState._ import models.annotation._ import models.annotation.nml.NmlResults.{NmlParseResult, NmlParseSuccess} import models.annotation.nml.{NmlResults, NmlWriter} -import models.dataset.{DataStoreDAO, Dataset, DatasetDAO, DatasetService, WKRemoteDataStoreClient} +import models.dataset._ import models.organization.OrganizationDAO import models.project.ProjectDAO import models.task._ import models.user._ +import org.apache.pekko.actor.ActorSystem +import org.apache.pekko.stream.Materializer import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData} +import play.silhouette.api.Silhouette import security.WkEnv import utils.WkConf +import java.io.{BufferedOutputStream, File, FileOutputStream} +import java.util.zip.Deflater import scala.concurrent.ExecutionContext class AnnotationIOController @Inject()( @@ -81,6 +76,7 @@ class AnnotationIOController @Inject()( extends Controller with FoxImplicits with ProtoGeometryImplicits + with AnnotationLayerPrecedence 
with LazyLogging { implicit val actorSystem: ActorSystem = ActorSystem() @@ -143,7 +139,16 @@ class AnnotationIOController @Inject()( mergedSkeletonLayers ::: mergedVolumeLayers, AnnotationType.Explorational, name, - description) + description, + ObjectId.generate) + annotationProto = AnnotationProto( + description = annotation.description, + version = 0L, + annotationLayers = annotation.annotationLayers.map(_.toProto), + earliestAccessibleVersion = 0L + ) + _ <- tracingStoreClient.saveAnnotationProto(annotation._id, annotationProto) + _ <- annotationDAO.insertOne(annotation) _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) } yield JsonOk( @@ -183,8 +188,7 @@ class AnnotationIOController @Inject()( mergedTracingId <- client.mergeVolumeTracingsByContents( VolumeTracings(uploadedVolumeLayersFlat.map(v => VolumeTracingOpt(Some(v.tracing)))), dataSource, - uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)), - persistTracing = true + uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)) ) } yield List( @@ -203,8 +207,7 @@ class AnnotationIOController @Inject()( else { for { mergedTracingId <- tracingStoreClient.mergeSkeletonTracingsByContents( - SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t)))), - persistTracing = true) + SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t))))) } yield List( AnnotationLayer(mergedTracingId, @@ -327,10 +330,9 @@ class AnnotationIOController @Inject()( boundingBox = bbox, elementClass = elementClass, fallbackLayer = fallbackLayerOpt.map(_.name), - largestSegmentId = - annotationService.combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, - fallbackLayerOpt.map(_.largestSegmentId)), - mags = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayerOpt).map(vec3IntToProto), + largestSegmentId = combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, + fallbackLayerOpt.map(_.largestSegmentId)), + mags = VolumeTracingMags.magsForVolumeTracing(dataSource, fallbackLayerOpt).map(vec3IntToProto), hasSegmentIndex = Some(tracingCanHaveSegmentIndex) ) } @@ -350,8 +352,7 @@ class AnnotationIOController @Inject()( // NML or Zip file containing skeleton and/or volume data of this annotation. 
In case of Compound annotations, multiple such annotations wrapped in another zip def download(typ: String, id: String, - skeletonVersion: Option[Long], - volumeVersion: Option[Long], + version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => @@ -371,8 +372,7 @@ class AnnotationIOController @Inject()( id, typ, request.identity, - skeletonVersion, - volumeVersion, + version, skipVolumeData.getOrElse(false), volumeDataZipFormatParsed.getOrElse(VolumeDataZipFormat.wkw)) ?~> "annotation.download.failed" } @@ -380,27 +380,20 @@ class AnnotationIOController @Inject()( } def downloadWithoutType(id: String, - skeletonVersion: Option[Long], - volumeVersion: Option[Long], + version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - annotation <- provider.provideAnnotation(id, request.identity) - result <- download(annotation.typ.toString, - id, - skeletonVersion, - volumeVersion, - skipVolumeData, - volumeDataZipFormat)(request) + annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND + result <- download(annotation.typ.toString, id, version, skipVolumeData, volumeDataZipFormat)(request) } yield result } private def downloadExplorational(annotationId: String, typ: String, issuingUser: Option[User], - skeletonVersion: Option[Long], - volumeVersion: Option[Long], + version: Option[Long], skipVolumeData: Boolean, volumeDataZipFormat: VolumeDataZipFormat)(implicit ctx: DBAccessContext) = { @@ -410,7 +403,7 @@ class AnnotationIOController @Inject()( for { tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedAnnotationLayers <- Fox.serialCombined(annotation.skeletonAnnotationLayers)( - tracingStoreClient.getSkeletonTracing(_, skeletonVersion)) + tracingStoreClient.getSkeletonTracing(annotation._id, _, version)) user <- userService.findOneCached(annotation._user)(GlobalAccessContext) taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) nmlStream = nmlWriter.toNmlStream( @@ -442,15 +435,16 @@ class AnnotationIOController @Inject()( tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedVolumeLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.volumeAnnotationLayers) { volumeAnnotationLayer => - tracingStoreClient.getVolumeTracing(volumeAnnotationLayer, - volumeVersion, + tracingStoreClient.getVolumeTracing(annotation._id, + volumeAnnotationLayer, + version, skipVolumeData, volumeDataZipFormat, dataset.voxelSize) } ?~> "annotation.download.fetchVolumeLayer.failed" fetchedSkeletonLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.skeletonAnnotationLayers) { skeletonAnnotationLayer => - tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, skeletonVersion) + tracingStoreClient.getSkeletonTracing(annotation._id, skeletonAnnotationLayer, version) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index cb8d983285f..2631693a5bc 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -153,12 +153,13 @@ 
class LegacyApiController @Inject()(annotationController: AnnotationController, } yield replacedResults } - def annotationInfoV8(id: String, timestamp: Long): Action[AnyContent] = sil.SecuredAction.async { implicit request => - for { - _ <- Fox.successful(logVersioned(request)) - result <- annotationController.infoWithoutType(id, timestamp)(request) - adaptedResult <- replaceInResult(addDataSetToTaskInAnnotation)(result) - } yield adaptedResult + def annotationInfoV8(id: String, timestamp: Option[Long]): Action[AnyContent] = sil.SecuredAction.async { + implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + result <- annotationController.infoWithoutType(id, timestamp)(request) + adaptedResult <- replaceInResult(addDataSetToTaskInAnnotation)(result) + } yield adaptedResult } def annotationsForTaskV8(taskId: String): Action[AnyContent] = diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 52e0b24207b..83e85db874e 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -1,6 +1,5 @@ package controllers -import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox @@ -12,9 +11,7 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessAnswer, UserAccessRequest } -import com.scalableminds.webknossos.tracingstore.tracings.TracingIds - -import javax.inject.Inject +import com.scalableminds.webknossos.tracingstore.tracings.TracingId import models.annotation._ import models.dataset.{DataStoreService, DatasetDAO, DatasetService} import models.job.JobDAO @@ -23,9 +20,11 @@ import models.user.{User, UserService} import net.liftweb.common.{Box, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result} +import play.silhouette.api.Silhouette import security.{RandomIDGenerator, URLSharing, WkEnv, WkSilhouetteEnvironment} import utils.WkConf +import javax.inject.Inject import scala.concurrent.ExecutionContext object RpcTokenHolder { @@ -40,11 +39,11 @@ object RpcTokenHolder { class UserTokenController @Inject()(datasetDAO: DatasetDAO, datasetService: DatasetService, - annotationDAO: AnnotationDAO, annotationPrivateLinkDAO: AnnotationPrivateLinkDAO, userService: UserService, organizationDAO: OrganizationDAO, annotationInformationProvider: AnnotationInformationProvider, + annotationStore: AnnotationStore, dataStoreService: DataStoreService, tracingStoreService: TracingStoreService, jobDAO: JobDAO, @@ -99,6 +98,8 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, handleDataSourceAccess(accessRequest.resourceId, accessRequest.mode, userBox)(sharingTokenAccessCtx) case AccessResourceType.tracing => handleTracingAccess(accessRequest.resourceId.directoryName, accessRequest.mode, userBox, token) + case AccessResourceType.annotation => + handleAnnotationAccess(accessRequest.resourceId.directoryName, accessRequest.mode, userBox, token) case AccessResourceType.jobExport => handleJobExportAccess(accessRequest.resourceId.directoryName, accessRequest.mode, userBox) case _ => @@ -160,7 +161,19 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, private def handleTracingAccess(tracingId: String, mode: AccessMode, userBox: Box[User], - token: Option[String]): Fox[UserAccessAnswer] = { + token: Option[String]): Fox[UserAccessAnswer] = + if (tracingId == TracingId.dummy) + 
Fox.successful(UserAccessAnswer(granted = true)) + else + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId)(GlobalAccessContext) ?~> "annotation.notFound" + result <- handleAnnotationAccess(annotation._id.toString, mode, userBox, token) + } yield result + + private def handleAnnotationAccess(annotationId: String, + mode: AccessMode, + userBox: Box[User], + token: Option[String]): Fox[UserAccessAnswer] = { // Access is explicitly checked by userBox, not by DBAccessContext, as there is no token sharing for annotations // Optionally, an accessToken can be provided which explicitly looks up the read right the private link table @@ -171,16 +184,21 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case _ => Fox.successful(false) } - if (tracingId == TracingIds.dummyTracingId) + if (annotationId == ObjectId.dummyId.toString) { Fox.successful(UserAccessAnswer(granted = true)) - else { + } else { for { - annotation <- annotationInformationProvider.annotationForTracing(tracingId)(GlobalAccessContext) ?~> "annotation.notFound" + annotationBox <- annotationInformationProvider + .provideAnnotation(annotationId, userBox)(GlobalAccessContext) + .futureBox + annotation <- annotationBox match { + case Full(_) => annotationBox.toFox + case _ => annotationStore.findInCache(annotationId).toFox + } annotationAccessByToken <- token .map(annotationPrivateLinkDAO.findOneByAccessToken) .getOrElse(Fox.empty) .futureBox - allowedByToken = annotationAccessByToken.exists(annotation._id == _._annotation) restrictions <- annotationInformationProvider.restrictionsFor( AnnotationIdentifier(annotation.typ, annotation._id))(GlobalAccessContext) ?~> "restrictions.notFound" @@ -202,7 +220,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, jobBox <- jobDAO.findOne(jobIdValidated)(DBAccessContext(userBox)).futureBox answer = jobBox match { case Full(_) => UserAccessAnswer(granted = true) - case _ => UserAccessAnswer(granted = false, Some(s"No ${mode} access to job export")) + case _ => UserAccessAnswer(granted = false, Some(s"No $mode access to job export")) } } yield answer } diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 26d5c7d5f53..118b001ab68 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -1,22 +1,20 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId -import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport - -import javax.inject.Inject +import com.scalableminds.webknossos.tracingstore.AnnotationUpdatesReport +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.tracings.TracingId import models.analytics.{AnalyticsService, UpdateAnnotationEvent, UpdateAnnotationViewOnlyEvent} import models.annotation.AnnotationState._ -import 
models.annotation.{ - Annotation, - AnnotationDAO, - AnnotationInformationProvider, - AnnotationLayerDAO, - TracingDataSourceTemporaryStore, - TracingStoreService -} +import models.annotation._ import models.dataset.{DatasetDAO, DatasetService} import models.organization.OrganizationDAO import models.user.UserDAO @@ -24,9 +22,11 @@ import models.user.time.TimeSpanService import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import scalapb.GeneratedMessage import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} import utils.WkConf +import javax.inject.Inject import scala.concurrent.ExecutionContext class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStoreService, @@ -37,6 +37,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore userDAO: UserDAO, annotationInformationProvider: AnnotationInformationProvider, analyticsService: AnalyticsService, + annotationService: AnnotationService, datasetDAO: DatasetDAO, annotationDAO: AnnotationDAO, annotationLayerDAO: AnnotationLayerDAO, @@ -50,18 +51,44 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore val bearerTokenService: WebknossosBearerTokenAuthenticatorService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService - def handleTracingUpdateReport(name: String, key: String): Action[TracingUpdatesReport] = - Action.async(validateJson[TracingUpdatesReport]) { implicit request => + def updateAnnotation(name: String, key: String, annotationId: String): Action[AnnotationProto] = + Action.async(validateProto[AnnotationProto]) { implicit request => + // tracingstore only sends this request after ensuring write access + implicit val ctx: DBAccessContext = GlobalAccessContext + tracingStoreService.validateAccess(name, key) { _ => + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) + newLayersProto = request.body.annotationLayers + existingLayerIds = existingLayers.map(_.tracingId).toSet + newLayerIds = newLayersProto.map(_.tracingId).toSet + layerIdsToDelete = existingLayerIds.diff(newLayerIds) + layerIdsToUpdate = existingLayerIds.intersect(newLayerIds) + layerIdsToInsert = newLayerIds.diff(existingLayerIds) + _ <- Fox.serialCombined(layerIdsToDelete.toList)( + annotationLayerDAO.deleteOneByTracingId(annotationIdValidated, _)) + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToInsert.contains(l.tracingId))) { layerProto => + annotationLayerDAO.insertOne(annotationIdValidated, AnnotationLayer.fromProto(layerProto)) + } + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => + annotationLayerDAO.updateName(annotationIdValidated, l.tracingId, l.name)) + // Layer stats are ignored here, they are sent eagerly when saving updates + _ <- annotationDAO.updateDescription(annotationIdValidated, request.body.description) + } yield Ok + } + } + + def handleTracingUpdateReport(name: String, key: String): Action[AnnotationUpdatesReport] = + Action.async(validateJson[AnnotationUpdatesReport]) { implicit request => implicit val ctx: DBAccessContext = GlobalAccessContext tracingStoreService.validateAccess(name, key) { _ => val report = request.body for { - annotation <- annotationDAO.findOneByTracingId(report.tracingId) + annotationId <- ObjectId.fromString(report.annotationId) + annotation <- 
annotationDAO.findOne(annotationId) _ <- ensureAnnotationNotFinished(annotation) _ <- annotationDAO.updateModified(annotation._id, Instant.now) - _ <- Fox.runOptional(report.statistics) { statistics => - annotationLayerDAO.updateStatistics(annotation._id, report.tracingId, statistics) - } + _ = report.statistics.map(statistics => annotationService.updateStatistics(annotation._id, statistics)) userBox <- bearerTokenService.userForTokenOpt(report.userToken).futureBox trackTime = report.significantChangesCount > 0 || !wkConf.WebKnossos.User.timeTrackingOnlyWithSignificantChanges _ <- Fox.runOptional(userBox)(user => @@ -113,6 +140,20 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } } + def annotationIdForTracing(name: String, key: String, tracingId: String): Action[AnyContent] = + Action.async { implicit request => + tracingStoreService.validateAccess(name, key) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + if (tracingId == TracingId.dummy) { + Fox.successful(Ok(Json.toJson(ObjectId.dummyId))) + } else { + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" + } yield Ok(Json.toJson(annotation._id)) + } + } + } + def dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], @@ -131,4 +172,29 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } yield Ok(Json.toJson(dataStore.url)) } } + + def createTracing(name: String, + key: String, + annotationId: String, + previousVersion: Long): Action[AnnotationLayerParameters] = + Action.async(validateJson[AnnotationLayerParameters]) { implicit request => + tracingStoreService.validateAccess(name, key) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + annotation <- annotationDAO.findOne(annotationIdValidated) ?~> "annotation.notFound" + dataset <- datasetDAO.findOne(annotation._dataset) + tracingEither <- annotationService.createTracingForExplorational(dataset, + request.body, + Some(annotation._id), + annotation.annotationLayers, + Some(previousVersion)) + tracing: GeneratedMessage = tracingEither match { + case Left(s: SkeletonTracing) => s + case Right(v: VolumeTracing) => v + } + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } diff --git a/app/models/analytics/AnalyticsService.scala b/app/models/analytics/AnalyticsService.scala index 5df8c4a2cf8..611657b3f2a 100644 --- a/app/models/analytics/AnalyticsService.scala +++ b/app/models/analytics/AnalyticsService.scala @@ -56,6 +56,7 @@ class AnalyticsService @Inject()(rpc: RPC, } val wrappedJson = Json.obj("api_key" -> conf.key, "events" -> List(analyticsEventJson)) rpc(conf.uri).silent.postJson(wrappedJson) + () } Fox.successful(()) } diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index d94130d0d19..5be6bfb3be5 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -9,9 +9,9 @@ import com.scalableminds.webknossos.tracingstore.tracings.TracingType import models.annotation.AnnotationState._ import models.annotation.AnnotationType.AnnotationType import play.api.libs.json._ +import slick.jdbc.GetResult import slick.jdbc.GetResult._ import slick.jdbc.PostgresProfile.api._ -import slick.jdbc.GetResult import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import slick.sql.SqlAction @@ -22,6 +22,11 @@ import 
javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration +object AnnotationDefaults { + val defaultName: String = "" + val defaultDescription: String = "" +} + case class Annotation( _id: ObjectId, _dataset: ObjectId, @@ -29,9 +34,9 @@ case class Annotation( _team: ObjectId, _user: ObjectId, annotationLayers: List[AnnotationLayer], - description: String = "", + description: String = AnnotationDefaults.defaultDescription, visibility: AnnotationVisibility.Value = AnnotationVisibility.Internal, - name: String = "", + name: String = AnnotationDefaults.defaultName, viewConfiguration: Option[JsObject] = None, state: AnnotationState.Value = Active, isLockedByOwner: Boolean = false, @@ -140,13 +145,20 @@ class AnnotationLayerDAO @Inject()(SQLClient: SqlClient)(implicit ec: ExecutionC q"""INSERT INTO webknossos.annotation_layers(_annotation, tracingId, typ, name, statistics) VALUES($annotationId, ${a.tracingId}, ${a.typ}, ${a.name}, ${a.stats})""".asUpdate - def deleteOne(annotationId: ObjectId, layerName: String): Fox[Unit] = + def deleteOneByName(annotationId: ObjectId, layerName: String): Fox[Unit] = for { _ <- run(q"""DELETE FROM webknossos.annotation_layers WHERE _annotation = $annotationId AND name = $layerName""".asUpdate) } yield () + def deleteOneByTracingId(annotationId: ObjectId, tracingId: String): Fox[Unit] = + for { + _ <- run(q"""DELETE FROM webknossos.annotation_layers + WHERE _annotation = $annotationId + AND tracingId = $tracingId""".asUpdate) + } yield () + def findAnnotationIdByTracingId(tracingId: String): Fox[ObjectId] = for { rList <- run(q"SELECT _annotation FROM webknossos.annotation_layers WHERE tracingId = $tracingId".as[ObjectId]) @@ -180,7 +192,7 @@ class AnnotationLayerDAO @Inject()(SQLClient: SqlClient)(implicit ec: ExecutionC def deleteAllForAnnotationQuery(annotationId: ObjectId): SqlAction[Int, NoStream, Effect] = q"DELETE FROM webknossos.annotation_layers WHERE _annotation = $annotationId".asUpdate - def updateStatistics(annotationId: ObjectId, tracingId: String, statistics: JsObject): Fox[Unit] = + def updateStatistics(annotationId: ObjectId, tracingId: String, statistics: JsValue): Fox[Unit] = for { _ <- run(q"""UPDATE webknossos.annotation_layers SET statistics = $statistics @@ -517,6 +529,18 @@ class AnnotationDAO @Inject()(sqlClient: SqlClient, annotationLayerDAO: Annotati AND a.typ = ${AnnotationType.Task} """.as[ObjectId]) } yield r.toList + def findBaseIdForTask(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[ObjectId] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT _id + FROM $existingCollectionName + WHERE _task = $taskId + AND typ = ${AnnotationType.TracingBase} + AND state != ${AnnotationState.Cancelled} + AND $accessQuery""".as[ObjectId]) + firstRow <- r.headOption + } yield firstRow + def findAllByTaskIdAndType(taskId: ObjectId, typ: AnnotationType)( implicit ctx: DBAccessContext): Fox[List[Annotation]] = for { diff --git a/app/models/annotation/AnnotationInformationProvider.scala b/app/models/annotation/AnnotationInformationProvider.scala index 9d7e4dd35f9..bf25256e291 100755 --- a/app/models/annotation/AnnotationInformationProvider.scala +++ b/app/models/annotation/AnnotationInformationProvider.scala @@ -2,14 +2,13 @@ package models.annotation import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} - -import javax.inject.Inject import models.annotation.AnnotationType.AnnotationType import 
models.annotation.handler.AnnotationInformationHandlerSelector import models.user.User import net.liftweb.common.Full import com.scalableminds.util.objectid.ObjectId +import javax.inject.Inject import scala.concurrent.ExecutionContext class AnnotationInformationProvider @Inject()( diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala new file mode 100644 index 00000000000..b5c94983966 --- /dev/null +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -0,0 +1,153 @@ +package models.annotation + +import com.scalableminds.util.objectid.ObjectId +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.geometry.{ + AdditionalCoordinateProto, + NamedBoundingBoxProto, + Vec3DoubleProto, + Vec3IntProto +} +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerType, + FetchedAnnotationLayer +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeDataZipFormat, VolumeTracingDefaults} +import models.dataset.Dataset + +import scala.concurrent.ExecutionContext + +// Used to pass duplicate properties when creating a new tracing to avoid masking them. +case class RedundantTracingProperties( + editPosition: Vec3IntProto, + editRotation: Vec3DoubleProto, + zoomLevel: Double, + userBoundingBoxes: Seq[NamedBoundingBoxProto], + editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto], +) + +trait AnnotationLayerPrecedence { + + protected def combineLargestSegmentIdsByPrecedence(fromNml: Option[Long], + fromFallbackLayer: Option[Option[Long]]): Option[Long] = + if (fromNml.nonEmpty) + // This was called for an NML upload. The NML had an explicit largestSegmentId. Use that. + fromNml + else if (fromFallbackLayer.nonEmpty) + // There is a fallback layer. Use its largestSegmentId, even if it is None. + // Some tracing functionality will be disabled until a segment id is set by the user. + fromFallbackLayer.flatten + else { + // There is no fallback layer. 
Start at default segment id for fresh volume layers + VolumeTracingDefaults.largestSegmentId + } + + protected def adaptSkeletonTracing( + skeletonTracing: SkeletonTracing, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): SkeletonTracing = + oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + skeletonTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(skeletonTracing) + + protected def adaptVolumeTracing(volumeTracing: VolumeTracing, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): VolumeTracing = + oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + volumeTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(volumeTracing) + + protected def getOldPrecedenceLayerProperties(existingAnnotationId: Option[ObjectId], + existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long], + dataset: Dataset, + tracingStoreClient: WKRemoteTracingStoreClient)( + implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = + for { + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationId, + existingAnnotationLayers, + previousVersion, + dataset, + tracingStoreClient) + oldPrecedenceLayerProperties: Option[RedundantTracingProperties] = oldPrecedenceLayer.map( + extractPrecedenceProperties) + } yield oldPrecedenceLayerProperties + + // If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) + protected def selectLayerWithPrecedenceFetched( + skeletonLayers: List[FetchedAnnotationLayer], + volumeLayers: List[FetchedAnnotationLayer])(implicit ec: ExecutionContext): Fox[FetchedAnnotationLayer] = + if (skeletonLayers.nonEmpty) { + Fox.successful(skeletonLayers.minBy(_.tracingId)) + } else if (volumeLayers.nonEmpty) { + Fox.successful(volumeLayers.minBy(_.tracingId)) + } else Fox.failure("annotation.download.noLayers") + + private def selectLayerWithPrecedence(annotationLayers: List[AnnotationLayer])( + implicit ec: ExecutionContext): Fox[AnnotationLayer] = { + val skeletonLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton) + val volumeLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Volume) + if (skeletonLayers.nonEmpty) { + Fox.successful(skeletonLayers.minBy(_.tracingId)) + } else if (volumeLayers.nonEmpty) { + Fox.successful(volumeLayers.minBy(_.tracingId)) + } else Fox.failure("Trying to select precedence layer from empty layer list.") + } + + private def fetchOldPrecedenceLayer(existingAnnotationIdOpt: Option[ObjectId], + existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long], + dataset: Dataset, + tracingStoreClient: WKRemoteTracingStoreClient)( + implicit ec: ExecutionContext): Fox[Option[FetchedAnnotationLayer]] = + if (existingAnnotationLayers.isEmpty) Fox.successful(None) + else + for { + existingAnnotationId <- existingAnnotationIdOpt.toFox ?~> "fetchOldPrecedenceLayer.needsAnnotationId" + oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) + oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) + 
diff --git a/app/models/annotation/AnnotationMerger.scala b/app/models/annotation/AnnotationMerger.scala
index 48aedf4a9da..36c6c5fbf79 100644
--- a/app/models/annotation/AnnotationMerger.scala
+++ b/app/models/annotation/AnnotationMerger.scala
@@ -2,11 +2,7 @@ package models.annotation

import com.scalableminds.util.accesscontext.DBAccessContext
import com.scalableminds.util.tools.{Fox, FoxImplicits}
-import com.scalableminds.webknossos.datastore.models.annotation.{
-  AnnotationLayer,
-  AnnotationLayerStatistics,
-  AnnotationLayerType
-}
+import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer
import com.typesafe.scalalogging.LazyLogging

import javax.inject.Inject
@@ -25,12 +21,11 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr
  def mergeTwo(
      annotationA: Annotation,
      annotationB: Annotation,
-      persistTracing: Boolean,
      issuingUser: User
  )(implicit ctx: DBAccessContext): Fox[Annotation] =
    mergeN(
      ObjectId.generate,
-      persistTracing,
+      toTemporaryStore = false,
      issuingUser._id,
      annotationB._dataset,
      annotationB._team,
@@ -40,7 +35,7 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr

  def mergeN(
      newId: ObjectId,
-      persistTracing: Boolean,
+      toTemporaryStore: Boolean,
      userId: ObjectId,
      datasetId: ObjectId,
      teamId: ObjectId,
@@ -51,7 +46,7 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr
      Fox.empty
    else {
      for {
-        mergedAnnotationLayers <- mergeTracingsOfAnnotations(annotations, datasetId, persistTracing)
+        mergedAnnotationLayers <- mergeAnnotationsInTracingstore(annotations, datasetId, newId, toTemporaryStore) ?~> "Failed to merge annotations in tracingstore."
      } yield {
        Annotation(
          newId,
@@ -65,56 +60,18 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr
    }
  }

-  private def mergeTracingsOfAnnotations(annotations: List[Annotation], datasetId: ObjectId, persistTracing: Boolean)(
-      implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] =
+  private def mergeAnnotationsInTracingstore(
+      annotations: List[Annotation],
+      datasetId: ObjectId,
+      newAnnotationId: ObjectId,
+      toTemporaryStore: Boolean)(implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] =
    for {
      dataset <- datasetDAO.findOne(datasetId)
      tracingStoreClient: WKRemoteTracingStoreClient <- tracingStoreService.clientFor(dataset)
-      skeletonLayers = annotations.flatMap(_.annotationLayers.find(_.typ == AnnotationLayerType.Skeleton))
-      volumeLayers = annotations.flatMap(_.annotationLayers.find(_.typ == AnnotationLayerType.Volume))
-      mergedSkeletonTracingId <- mergeSkeletonTracings(tracingStoreClient,
-                                                       skeletonLayers.map(_.tracingId),
-                                                       persistTracing)
-      mergedVolumeTracingId <- mergeVolumeTracings(tracingStoreClient, volumeLayers.map(_.tracingId), persistTracing)
-      mergedSkeletonName = allEqual(skeletonLayers.map(_.name))
-      mergedVolumeName = allEqual(volumeLayers.map(_.name))
-      mergedSkeletonLayer = mergedSkeletonTracingId.map(
-        id =>
-          AnnotationLayer(id,
-                          AnnotationLayerType.Skeleton,
-                          mergedSkeletonName.getOrElse(AnnotationLayer.defaultSkeletonLayerName),
-                          AnnotationLayerStatistics.unknown))
-      mergedVolumeLayer = mergedVolumeTracingId.map(
-        id =>
-          AnnotationLayer(id,
-                          AnnotationLayerType.Volume,
-                          mergedVolumeName.getOrElse(AnnotationLayer.defaultVolumeLayerName),
-                          AnnotationLayerStatistics.unknown))
-    } yield List(mergedSkeletonLayer, mergedVolumeLayer).flatten
-
-  private def allEqual(str: List[String]): Option[String] =
-    // returns the str if all names are equal, None otherwise
-    str.headOption.map(name => str.forall(_ == name)).flatMap { _ =>
-      str.headOption
-    }
-
-  private def mergeSkeletonTracings(tracingStoreClient: WKRemoteTracingStoreClient,
-                                    skeletonTracingIds: List[String],
-                                    persistTracing: Boolean) =
-    if (skeletonTracingIds.isEmpty)
-      Fox.successful(None)
-    else
-      tracingStoreClient
-        .mergeSkeletonTracingsByIds(skeletonTracingIds, persistTracing)
-        .map(Some(_)) ?~> "Failed to merge skeleton tracings."
+      mergedAnnotationProto <- tracingStoreClient.mergeAnnotationsByIds(annotations.map(_.id),
+                                                                        newAnnotationId,
+                                                                        toTemporaryStore)
+      layers = mergedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto)
+    } yield layers.toList

-  private def mergeVolumeTracings(tracingStoreClient: WKRemoteTracingStoreClient,
-                                  volumeTracingIds: List[String],
-                                  persistTracing: Boolean) =
-    if (volumeTracingIds.isEmpty)
-      Fox.successful(None)
-    else
-      tracingStoreClient
-        .mergeVolumeTracingsByIds(volumeTracingIds, persistTracing)
-        .map(Some(_)) ?~> "Failed to merge volume tracings."
}
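Aside: a hedged sketch of the new merge flow from a caller's perspective; `annotationMerger` and the two annotations are assumed to be in scope. Persisted merges go through mergeTwo (toTemporaryStore = false), while the view-only project/task/taskType merges further down call mergeN with toTemporaryStore = true:

    for {
      // Layer merging is delegated entirely to the tracingstore, which returns an
      // AnnotationProto; its layers are converted back via AnnotationLayer.fromProto.
      merged <- annotationMerger.mergeTwo(annotationA, annotationB, issuingUser)
    } yield merged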
diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala
index 3b333d7bf78..c6ee592db24 100755
--- a/app/models/annotation/AnnotationService.scala
+++ b/app/models/annotation/AnnotationService.scala
@@ -1,31 +1,18 @@
package models.annotation

-import org.apache.pekko.actor.ActorSystem
-import org.apache.pekko.stream.Materializer
import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext}
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.io.{NamedStream, ZipIO}
import com.scalableminds.util.objectid.ObjectId
import com.scalableminds.util.time.Instant
import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils}
+import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto}
import com.scalableminds.webknossos.datastore.SkeletonTracing._
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings}
-import com.scalableminds.webknossos.datastore.geometry.{
-  AdditionalCoordinateProto,
-  ColorProto,
-  NamedBoundingBoxProto,
-  Vec3DoubleProto,
-  Vec3IntProto
-}
+import com.scalableminds.webknossos.datastore.geometry.ColorProto
import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults}
import com.scalableminds.webknossos.datastore.models.VoxelSize
-import com.scalableminds.webknossos.datastore.models.annotation.{
-  AnnotationLayer,
-  AnnotationLayerStatistics,
-  AnnotationLayerType,
-  AnnotationSource,
-  FetchedAnnotationLayer
-}
+import com.scalableminds.webknossos.datastore.models.annotation._
import com.scalableminds.webknossos.datastore.models.datasource.{
  AdditionalAxis,
  ElementClass,
@@ -33,16 +20,14 @@ import com.scalableminds.webknossos.datastore.models.datasource.{
  SegmentationLayerLike => SegmentationLayer
}
import com.scalableminds.webknossos.datastore.rpc.RPC
-import com.scalableminds.webknossos.tracingstore.tracings._
+import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters
import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat
import com.scalableminds.webknossos.tracingstore.tracings.volume.{
  MagRestrictions,
-  VolumeDataZipFormat,
  VolumeTracingDefaults,
-  VolumeTracingDownsampling
+  VolumeTracingMags
}
import com.typesafe.scalalogging.LazyLogging
-import controllers.AnnotationLayerParameters
import models.annotation.AnnotationState._
import models.annotation.AnnotationType.AnnotationType
import models.annotation.handler.SavedTracingInformationHandler
@@ -54,6 +39,8 @@ import models.task.{Task, TaskDAO, TaskService, TaskTypeDAO}
import models.team.{TeamDAO, TeamService}
import models.user.{User, UserDAO, UserService}
import net.liftweb.common.{Box, Full}
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.stream.Materializer
import play.api.i18n.{Messages, MessagesProvider}
import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator}
import play.api.libs.json.{JsNull, JsObject, JsValue, Json}
@@ -77,21 +64,11 @@ case class DownloadAnnotation(skeletonTracingIdOpt: Option[String],
                              datasetName: String,
                              datasetId: ObjectId)

-// Used to pass duplicate properties when creating a new tracing to avoid masking them.
-// Uses the proto-generated geometry classes, hence the full qualifiers.
-case class RedundantTracingProperties(
-    editPosition: Vec3IntProto,
-    editRotation: Vec3DoubleProto,
-    zoomLevel: Double,
-    userBoundingBoxes: Seq[NamedBoundingBoxProto],
-    editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto],
-)
-
class AnnotationService @Inject()(
    annotationInformationProvider: AnnotationInformationProvider,
    savedTracingInformationHandler: SavedTracingInformationHandler,
    annotationDAO: AnnotationDAO,
-    annotationLayersDAO: AnnotationLayerDAO,
+    annotationLayerDAO: AnnotationLayerDAO,
    userDAO: UserDAO,
    taskTypeDAO: TaskTypeDAO,
    taskService: TaskService,
@@ -116,7 +93,9 @@ class AnnotationService @Inject()(
    extends BoxImplicits
    with FoxImplicits
    with ProtoGeometryImplicits
+    with AnnotationLayerPrecedence
    with LazyLogging {
+
  implicit val actorSystem: ActorSystem = ActorSystem()

  val DefaultAnnotationListLimit = 1000
@@ -149,7 +128,7 @@ class AnnotationService @Inject()(
      magRestrictions: MagRestrictions,
      mappingName: Option[String]
  ): Fox[VolumeTracing] = {
-    val mags = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayer)
+    val mags = VolumeTracingMags.magsForVolumeTracing(dataSource, fallbackLayer)
    val magsRestricted = magRestrictions.filterAllowed(mags)
    val additionalAxes = fallbackLayer.map(_.additionalAxes).getOrElse(dataSource.additionalAxesUnion)
@@ -184,56 +163,21 @@ class AnnotationService @Inject()(
    )
  }

-  def combineLargestSegmentIdsByPrecedence(fromNml: Option[Long],
-                                           fromFallbackLayer: Option[Option[Long]]): Option[Long] =
-    if (fromNml.nonEmpty)
-      // This was called for an NML upload. The NML had an explicit largestSegmentId. Use that.
-      fromNml
-    else if (fromFallbackLayer.nonEmpty)
-      // There is a fallback layer. Use its largestSegmentId, even if it is None.
-      // Some tracing functionality will be disabled until a segment id is set by the user.
-      fromFallbackLayer.flatten
-    else {
-      // There is no fallback layer. Start at default segment id for fresh volume layers
-      VolumeTracingDefaults.largestSegmentId
-    }
-
-  def addAnnotationLayer(annotation: Annotation,
-                         organizationId: String,
-                         annotationLayerParameters: AnnotationLayerParameters)(implicit ctx: DBAccessContext,
-                                                                               mp: MessagesProvider): Fox[Unit] =
-    for {
-      dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFoundForAnnotation"
-      dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) ?~> "dataSource.notFound"
-      newAnnotationLayers <- createTracingsForExplorational(
-        dataset,
-        dataSource,
-        List(annotationLayerParameters),
-        organizationId,
-        annotation.annotationLayers) ?~> "annotation.createTracings.failed"
-      _ <- annotationLayersDAO.insertForAnnotation(annotation._id, newAnnotationLayers)
-    } yield ()
-
-  def deleteAnnotationLayer(annotation: Annotation, layerName: String): Fox[Unit] =
-    for {
-      _ <- annotationLayersDAO.deleteOne(annotation._id, layerName)
-    } yield ()
-
-  private def createTracingsForExplorational(dataset: Dataset,
-                                             dataSource: DataSource,
-                                             allAnnotationLayerParameters: List[AnnotationLayerParameters],
-                                             datasetOrganizationId: String,
-                                             existingAnnotationLayers: List[AnnotationLayer] = List())(
+  def createTracingForExplorational(dataset: Dataset,
+                                    params: AnnotationLayerParameters,
+                                    existingAnnotationId: Option[ObjectId],
+                                    existingAnnotationLayers: List[AnnotationLayer],
+                                    previousVersion: Option[Long])(
      implicit ctx: DBAccessContext,
-      mp: MessagesProvider): Fox[List[AnnotationLayer]] = {
+      mp: MessagesProvider): Fox[Either[SkeletonTracing, VolumeTracing]] = {

-    def getAutoFallbackLayerName: Option[String] =
+    def getAutoFallbackLayerName(dataSource: DataSource): Option[String] =
      dataSource.dataLayers.find {
        case _: SegmentationLayer => true
        case _                    => false
      }.map(_.name)

-    def getFallbackLayer(fallbackLayerName: String): Fox[SegmentationLayer] =
+    def getFallbackLayer(dataSource: DataSource, fallbackLayerName: String): Fox[SegmentationLayer] =
      for {
        fallbackLayer <- dataSource.dataLayers
          .filter(dl => dl.name == fallbackLayerName)
@@ -251,109 +195,12 @@ class AnnotationService @Inject()(
            fallbackLayer.elementClass)
      } yield fallbackLayer

-    def createAndSaveAnnotationLayer(annotationLayerParameters: AnnotationLayerParameters,
-                                     oldPrecedenceLayerProperties: Option[RedundantTracingProperties],
-                                     dataStore: DataStore): Fox[AnnotationLayer] =
-      for {
-        client <- tracingStoreService.clientFor(dataset)
-        tracingIdAndName <- annotationLayerParameters.typ match {
-          case AnnotationLayerType.Skeleton =>
-            val skeleton = SkeletonTracingDefaults.createInstance.copy(
-              datasetName = dataset.name,
-              editPosition = dataSource.center,
-              organizationId = Some(datasetOrganizationId),
-              additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion)
-            )
-            val skeletonAdapted = oldPrecedenceLayerProperties.map { p =>
-              skeleton.copy(
-                editPosition = p.editPosition,
-                editRotation = p.editRotation,
-                zoomLevel = p.zoomLevel,
-                userBoundingBoxes = p.userBoundingBoxes,
-                editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates
-              )
-            }.getOrElse(skeleton)
-            for {
-              tracingId <- client.saveSkeletonTracing(skeletonAdapted)
-              name = annotationLayerParameters.name.getOrElse(
-                AnnotationLayer.defaultNameForType(annotationLayerParameters.typ))
-            } yield (tracingId, name)
-          case AnnotationLayerType.Volume =>
-            val autoFallbackLayerName =
-              if (annotationLayerParameters.autoFallbackLayer) getAutoFallbackLayerName else None
-            val fallbackLayerName =
-              annotationLayerParameters.fallbackLayerName.orElse(autoFallbackLayerName)
-            for {
-              fallbackLayer <- Fox.runOptional(fallbackLayerName)(getFallbackLayer)
-              volumeTracing <- createVolumeTracing(
-                dataSource,
-                datasetOrganizationId,
-                dataStore,
-                fallbackLayer,
-                magRestrictions = annotationLayerParameters.magRestrictions.getOrElse(MagRestrictions.empty),
-                mappingName = annotationLayerParameters.mappingName,
-              )
-              volumeTracingAdapted = oldPrecedenceLayerProperties.map { p =>
-                volumeTracing.copy(
-                  editPosition = p.editPosition,
-                  editRotation = p.editRotation,
-                  zoomLevel = p.zoomLevel,
-                  userBoundingBoxes = p.userBoundingBoxes,
-                  editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates
-                )
-              }.getOrElse(volumeTracing)
-              volumeTracingId <- client.saveVolumeTracing(volumeTracingAdapted, dataSource = Some(dataSource))
-              name = annotationLayerParameters.name
-                .orElse(autoFallbackLayerName)
-                .getOrElse(AnnotationLayer.defaultNameForType(annotationLayerParameters.typ))
-            } yield (volumeTracingId, name)
-          case _ =>
-            Fox.failure(s"Unknown AnnotationLayerType: ${annotationLayerParameters.typ}")
-        }
-      } yield
-        AnnotationLayer(tracingIdAndName._1,
-                        annotationLayerParameters.typ,
-                        tracingIdAndName._2,
-                        AnnotationLayerStatistics.zeroedForTyp(annotationLayerParameters.typ))
-
-    def fetchOldPrecedenceLayer: Fox[Option[FetchedAnnotationLayer]] =
-      if (existingAnnotationLayers.isEmpty) Fox.successful(None)
-      else
-        for {
-          oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers)
-          tracingStoreClient <- tracingStoreService.clientFor(dataset)
-          oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton)
-            tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None)
-          else
-            tracingStoreClient.getVolumeTracing(oldPrecedenceLayer,
-                                                None,
-                                                skipVolumeData = true,
-                                                volumeDataZipFormat = VolumeDataZipFormat.wkw,
-                                                dataset.voxelSize)
-        } yield Some(oldPrecedenceLayerFetched)
-
-    def extractPrecedenceProperties(oldPrecedenceLayer: FetchedAnnotationLayer): RedundantTracingProperties =
-      oldPrecedenceLayer.tracing match {
-        case Left(s) =>
-          RedundantTracingProperties(
-            s.editPosition,
-            s.editRotation,
-            s.zoomLevel,
-            s.userBoundingBoxes ++ s.userBoundingBox.map(
-              com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)),
-            s.editPositionAdditionalCoordinates
-          )
-        case Right(v) =>
-          RedundantTracingProperties(
-            v.editPosition,
-            v.editRotation,
-            v.zoomLevel,
-            v.userBoundingBoxes ++ v.userBoundingBox.map(
-              com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)),
-            v.editPositionAdditionalCoordinates
-          )
-      }
-
    for {
+      dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset"
+      inboxDataSource <- datasetService.dataSourceFor(dataset)
+      dataSource <- inboxDataSource.toUsable ?~> Messages("dataset.notImported", inboxDataSource.id.directoryName)
+      tracingStoreClient <- tracingStoreService.clientFor(dataset)
+      /*
       Note that the tracings have redundant properties, with a precedence logic selecting a layer
       from which the values are used. Adding a layer may change this precedence, so the redundant
@@ -363,27 +210,82 @@ class AnnotationService @Inject()(
       We do this for *every* new layer, since we only later get its ID which determines the actual precedence.
       All of this is skipped if existingAnnotationLayers is empty.
       */
-      oldPrecedenceLayer <- fetchOldPrecedenceLayer
-      dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset"
-      precedenceProperties = oldPrecedenceLayer.map(extractPrecedenceProperties)
-      newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(p =>
-        createAndSaveAnnotationLayer(p, precedenceProperties, dataStore))
-    } yield newAnnotationLayers
+      oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationId,
+                                                                      existingAnnotationLayers,
+                                                                      previousVersion,
+                                                                      dataset,
+                                                                      tracingStoreClient)
+      tracing <- params.typ match {
+        case AnnotationLayerType.Skeleton =>
+          val skeleton = SkeletonTracingDefaults.createInstance.copy(
+            datasetName = dataset.name,
+            editPosition = dataSource.center,
+            organizationId = Some(dataset._organization),
+            additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion)
+          )
+          val skeletonAdapted = adaptSkeletonTracing(skeleton, oldPrecedenceLayerProperties)
+          Fox.successful(Left(skeletonAdapted))
+        case AnnotationLayerType.Volume =>
+          val autoFallbackLayerName =
+            if (params.autoFallbackLayer) getAutoFallbackLayerName(dataSource) else None
+          val fallbackLayerName = params.fallbackLayerName.orElse(autoFallbackLayerName)
+          for {
+            fallbackLayer <- Fox.runOptional(fallbackLayerName)(n => getFallbackLayer(dataSource, n))
+            volumeTracing <- createVolumeTracing(
+              dataSource,
+              dataset._organization,
+              dataStore,
+              fallbackLayer,
+              magRestrictions = params.magRestrictions.getOrElse(MagRestrictions.empty),
+              mappingName = params.mappingName
+            )
+            volumeTracingAdapted = adaptVolumeTracing(volumeTracing, oldPrecedenceLayerProperties)
+          } yield Right(volumeTracingAdapted)
+      }
+    } yield tracing
  }

-  /*
-   If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway)
-   This needs to match the code in NmlWriter’s selectLayerWithPrecedence, though the types are different
-   */
-  private def selectLayerWithPrecedence(annotationLayers: List[AnnotationLayer]): Fox[AnnotationLayer] = {
-    val skeletonLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton)
-    val volumeLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Volume)
-    if (skeletonLayers.nonEmpty) {
-      Fox.successful(skeletonLayers.minBy(_.tracingId))
-    } else if (volumeLayers.nonEmpty) {
-      Fox.successful(volumeLayers.minBy(_.tracingId))
-    } else Fox.failure("Trying to select precedence layer from empty layer list.")
-  }
+  private def createLayersForExplorational(dataset: Dataset,
+                                           annotationId: ObjectId,
+                                           allAnnotationLayerParameters: List[AnnotationLayerParameters])(
+      implicit ctx: DBAccessContext,
+      mp: MessagesProvider): Fox[List[AnnotationLayer]] =
+    for {
+      tracingStoreClient <- tracingStoreService.clientFor(dataset)
+      newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters) { annotationLayerParameters =>
+        for {
+          tracing <- createTracingForExplorational(dataset,
+                                                   annotationLayerParameters,
+                                                   existingAnnotationId = None,
+                                                   existingAnnotationLayers = List.empty,
+                                                   previousVersion = None)
+          layerName = annotationLayerParameters.name.getOrElse(
+            AnnotationLayer.defaultNameForType(annotationLayerParameters.typ))
+          tracingId <- tracing match {
+            case Left(skeleton) => tracingStoreClient.saveSkeletonTracing(skeleton)
+            case Right(volume)  => tracingStoreClient.saveVolumeTracing(volume)
+          }
+        } yield
+          AnnotationLayer(tracingId,
+                          annotationLayerParameters.typ,
+                          layerName,
+                          AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ))
+      }
+      layersProto = newAnnotationLayers.map { l =>
+        AnnotationLayerProto(
+          l.tracingId,
+          l.name,
+          AnnotationLayerType.toProto(l.typ)
+        )
+      }
+      annotationProto = AnnotationProto(
+        description = AnnotationDefaults.defaultDescription,
+        version = 0L,
+        annotationLayers = layersProto,
+        earliestAccessibleVersion = 0L
+      )
+      _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto)
+    } yield newAnnotationLayers

  def createExplorationalFor(user: User,
                             datasetId: ObjectId,
@@ -392,52 +294,13 @@ class AnnotationService @Inject()(
      m: MessagesProvider): Fox[Annotation] =
    for {
      dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById"
-      dataSource <- datasetService.dataSourceFor(dataset)
-      datasetOrganization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound"
-      usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.directoryName)
-      annotationLayers <- createTracingsForExplorational(dataset,
-                                                         usableDataSource,
-                                                         annotationLayerParameters,
-                                                         datasetOrganization._id) ?~> "annotation.createTracings.failed"
+      newAnnotationId = ObjectId.generate
+      annotationLayers <- createLayersForExplorational(dataset, newAnnotationId, annotationLayerParameters) ?~> "annotation.createTracings.failed"
      teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden"
-      annotation = Annotation(ObjectId.generate, datasetId, None, teamId, user._id, annotationLayers)
+      annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers)
      _ <- annotationDAO.insertOne(annotation)
    } yield annotation

-  def makeAnnotationHybrid(annotation: Annotation, organizationId: String, fallbackLayerName: Option[String])(
-      implicit ctx: DBAccessContext,
-      mp: MessagesProvider): Fox[Unit] =
-    for {
-      newAnnotationLayerType <- annotation.tracingType match {
-        case TracingType.skeleton => Fox.successful(AnnotationLayerType.Volume)
-        case TracingType.volume   => Fox.successful(AnnotationLayerType.Skeleton)
-        case _                    => Fox.failure("annotation.makeHybrid.alreadyHybrid")
-      }
-      usedFallbackLayerName = if (newAnnotationLayerType == AnnotationLayerType.Volume) fallbackLayerName else None
-      newAnnotationLayerParameters = AnnotationLayerParameters(
-        newAnnotationLayerType,
-        usedFallbackLayerName,
-        autoFallbackLayer = false,
-        None,
-        Some(MagRestrictions.empty),
-        Some(AnnotationLayer.defaultNameForType(newAnnotationLayerType)),
-        None
-      )
-      _ <- addAnnotationLayer(annotation, organizationId, newAnnotationLayerParameters) ?~> "makeHybrid.createTracings.failed"
-    } yield ()
-
-  def downsampleAnnotation(annotation: Annotation, volumeAnnotationLayer: AnnotationLayer)(
-      implicit ctx: DBAccessContext): Fox[Unit] =
-    for {
-      dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFoundForAnnotation"
-      _ <- bool2Fox(volumeAnnotationLayer.typ == AnnotationLayerType.Volume) ?~> "annotation.downsample.volumeOnly"
-      rpcClient <- tracingStoreService.clientFor(dataset)
-      newVolumeTracingId <- rpcClient.duplicateVolumeTracing(volumeAnnotationLayer.tracingId, downsample = true)
-      _ = logger.info(
-        s"Replacing volume tracing ${volumeAnnotationLayer.tracingId} by downsampled copy $newVolumeTracingId for annotation ${annotation._id}.")
-      _ <- annotationLayersDAO.replaceTracingId(annotation._id, volumeAnnotationLayer.tracingId, newVolumeTracingId)
-    } yield ()
-
  // WARNING: needs to be repeatable, might be called multiple times for an annotation
  def finish(annotation: Annotation, user: User, restrictions: AnnotationRestrictions)(
      implicit ctx: DBAccessContext): Fox[String] = {
@@ -477,54 +340,39 @@ class AnnotationService @Inject()(
    }).flatten
  }

-  private def baseForTask(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[Annotation] =
-    (for {
-      list <- annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.TracingBase)
-    } yield list.headOption.toFox).flatten
-
  def annotationsFor(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[List[Annotation]] =
    annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.Task)

-  private def tracingsFromBase(annotationBase: Annotation, dataset: Dataset)(
-      implicit ctx: DBAccessContext,
-      m: MessagesProvider): Fox[(Option[String], Option[String])] =
-    for {
-      _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name)
-      tracingStoreClient <- tracingStoreService.clientFor(dataset)
-      baseSkeletonIdOpt <- annotationBase.skeletonTracingId
-      baseVolumeIdOpt <- annotationBase.volumeTracingId
-      newSkeletonId: Option[String] <- Fox.runOptional(baseSkeletonIdOpt)(skeletonId =>
-        tracingStoreClient.duplicateSkeletonTracing(skeletonId))
-      newVolumeId: Option[String] <- Fox.runOptional(baseVolumeIdOpt)(volumeId =>
-        tracingStoreClient.duplicateVolumeTracing(volumeId))
-    } yield (newSkeletonId, newVolumeId)
-
  def createAnnotationFor(user: User, taskId: ObjectId, initializingAnnotationId: ObjectId)(
      implicit m: MessagesProvider,
-      ctx: DBAccessContext): Fox[Annotation] = {
-    def useAsTemplateAndInsert(annotation: Annotation) =
-      for {
-        datasetName <- datasetDAO.getNameById(annotation._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation"
-        dataset <- datasetDAO.findOne(annotation._dataset) ?~> Messages("dataset.noAccess", datasetName)
-        (newSkeletonId, newVolumeId) <- tracingsFromBase(annotation, dataset) ?~> s"Failed to use annotation base as template for task $taskId with annotation base ${annotation._id}"
-        annotationLayers <- AnnotationLayer.layersFromIds(newSkeletonId, newVolumeId)
-        newAnnotation = annotation.copy(
-          _id = initializingAnnotationId,
-          _user = user._id,
-          annotationLayers = annotationLayers,
-          state = Active,
-          typ = AnnotationType.Task,
-          created = Instant.now,
-          modified = Instant.now
-        )
-        _ <- annotationDAO.updateInitialized(newAnnotation)
-      } yield newAnnotation
-
+      ctx: DBAccessContext): Fox[Annotation] =
    for {
-      annotationBase <- baseForTask(taskId) ?~> "Failed to retrieve annotation base."
-      result <- useAsTemplateAndInsert(annotationBase).toFox
-    } yield result
-  }
+      annotationBaseId <- annotationDAO.findBaseIdForTask(taskId) ?~> "Failed to retrieve annotation base id."
+      annotationBase <- annotationDAO.findOne(annotationBaseId) ?~> "Failed to retrieve annotation base."
+      datasetName <- datasetDAO.getNameById(annotationBase._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation"
+      dataset <- datasetDAO.findOne(annotationBase._dataset) ?~> Messages("dataset.noAccess", datasetName)
+      _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name)
+      tracingStoreClient <- tracingStoreService.clientFor(dataset)
+      _ = logger.info(
+        f"Task assignment: creating annotation $initializingAnnotationId from base $annotationBaseId for task $taskId")
+      duplicatedAnnotationProto <- tracingStoreClient.duplicateAnnotation(
+        annotationBaseId,
+        initializingAnnotationId,
+        version = None,
+        isFromTask = false, // isFromTask is set when duplicating a task annotation itself, not when assigning a task
+        datasetBoundingBox = None
+      )
+      newAnnotation = annotationBase.copy(
+        _id = initializingAnnotationId,
+        _user = user._id,
+        annotationLayers = duplicatedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto).toList,
+        state = Active,
+        typ = AnnotationType.Task,
+        created = Instant.now,
+        modified = Instant.now
+      )
+      _ <- annotationDAO.updateInitialized(newAnnotation)
+    } yield newAnnotation

  def createSkeletonTracingBase(datasetId: ObjectId,
                                boundingBox: Option[BoundingBox],
@@ -597,13 +445,15 @@ class AnnotationService @Inject()(
      case _ => annotationDAO.abortInitializingAnnotation(initializingAnnotationId)
    }

-  def createAnnotationBase(
+  // Save annotation base to postgres AND annotation proto to tracingstore.
+  def createAndSaveAnnotationBase(
      taskFox: Fox[Task],
      userId: ObjectId,
      skeletonTracingIdBox: Box[Option[String]],
      volumeTracingIdBox: Box[Option[String]],
      datasetId: ObjectId,
-      description: Option[String]
+      description: Option[String],
+      tracingStoreClient: WKRemoteTracingStoreClient
  )(implicit ctx: DBAccessContext): Fox[Unit] =
    for {
      task <- taskFox
@@ -620,27 +470,34 @@ class AnnotationService @Inject()(
                                 annotationLayers,
                                 description.getOrElse(""),
                                 typ = AnnotationType.TracingBase)
+      annotationBaseProto = AnnotationProto(
+        description = AnnotationDefaults.defaultDescription,
+        version = 0L,
+        annotationLayers = annotationLayers.map(_.toProto),
+        earliestAccessibleVersion = 0L
+      )
+      _ <- tracingStoreClient.saveAnnotationProto(annotationBase._id, annotationBaseProto)
      _ <- annotationDAO.insertOne(annotationBase)
    } yield ()

  def createFrom(user: User,
                 dataset: Dataset,
-                annotationLayers: List[AnnotationLayer],
+                annotationLayers: Seq[AnnotationLayer],
                 annotationType: AnnotationType,
                 name: Option[String],
-                description: String): Fox[Annotation] =
+                description: String,
+                newAnnotationId: ObjectId): Fox[Annotation] =
    for {
      teamId <- selectSuitableTeam(user, dataset)
-      annotation = Annotation(ObjectId.generate,
+      annotation = Annotation(newAnnotationId,
                              dataset._id,
                              None,
                              teamId,
                              user._id,
-                              annotationLayers,
+                              annotationLayers.toList,
                              description,
                              name = name.getOrElse(""),
                              typ = annotationType)
-      _ <- annotationDAO.insertOne(annotation)
    } yield annotation

  def updateTeamsForSharedAnnotation(annotationId: ObjectId, teams: List[ObjectId])(
@@ -751,10 +608,7 @@ class AnnotationService @Inject()(
        case Some(_) if skipVolumeData => Fox.successful(None)
        case Some(tracingId) =>
          tracingStoreClient
-            .getVolumeData(tracingId,
-                           version = None,
-                           volumeDataZipFormat = volumeDataZipFormat,
-                           voxelSize = dataset.voxelSize)
+            .getVolumeData(tracingId, volumeDataZipFormat = volumeDataZipFormat, voxelSize = dataset.voxelSize)
            .map(Some(_))
      }
    } yield tracingDataObjects
@@ -838,31 +692,13 @@ class AnnotationService @Inject()(
      updated <- annotationInformationProvider.provideAnnotation(typ, id, issuingUser)
    } yield updated

-  def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] =
-    annotation.typ match {
-      case AnnotationType.Explorational =>
-        Fox.failure("annotation.revert.tasksOnly")
-      case AnnotationType.Task =>
-        for {
-          task <- taskFor(annotation)
-          oldSkeletonTracingIdOpt <- annotation.skeletonTracingId // This also asserts that the annotation does not have multiple volume/skeleton layers
-          oldVolumeTracingIdOpt <- annotation.volumeTracingId
-          _ = logger.warn(
-            s"Resetting annotation ${annotation._id} to base, discarding skeleton tracing $oldSkeletonTracingIdOpt and/or volume tracing $oldVolumeTracingIdOpt")
-          annotationBase <- baseForTask(task._id)
-          dataset <- datasetDAO.findOne(annotationBase._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation"
-          (newSkeletonIdOpt, newVolumeIdOpt) <- tracingsFromBase(annotationBase, dataset)
-          _ <- Fox.bool2Fox(newSkeletonIdOpt.isDefined || newVolumeIdOpt.isDefined) ?~> "annotation.needsEitherSkeletonOrVolume"
-          _ <- Fox.runOptional(newSkeletonIdOpt)(newSkeletonId =>
-            oldSkeletonTracingIdOpt.toFox.map { oldSkeletonId =>
-              annotationLayersDAO.replaceTracingId(annotation._id, oldSkeletonId, newSkeletonId)
-            })
-          _ <- Fox.runOptional(newVolumeIdOpt)(newVolumeId =>
-            oldVolumeTracingIdOpt.toFox.map { oldVolumeId =>
-              annotationLayersDAO.replaceTracingId(annotation._id, oldVolumeId, newVolumeId)
-            })
-        } yield ()
-    }
+  def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext): Fox[Unit] =
+    for {
+      _ <- bool2Fox(annotation.typ == AnnotationType.Task) ?~> "annotation.revert.tasksOnly"
+      dataset <- datasetDAO.findOne(annotation._dataset)
+      tracingStoreClient <- tracingStoreService.clientFor(dataset)
+      _ <- tracingStoreClient.resetToBase(annotation._id) ?~> "annotation.revert.failed"
+    } yield ()

  private def settingsFor(annotation: Annotation)(implicit ctx: DBAccessContext) =
    if (annotation.typ == AnnotationType.Task || annotation.typ == AnnotationType.TracingBase)
@@ -1035,4 +871,12 @@ class AnnotationService @Inject()(
        "volume"
      }
    }
+
+  def updateStatistics(annotationId: ObjectId, statistics: JsObject): Unit =
+    // Fail silently, because the layer may not (yet/anymore) be present in postgres at this time
+    statistics.value.toSeq.map {
+      case (tracingId, statisticsForTracing) =>
+        annotationLayerDAO.updateStatistics(annotationId, tracingId, statisticsForTracing)
+    }
+
}
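Aside: updateStatistics above expects a JsObject keyed by tracingId, matching the JSON_OBJECT_AGG change in TimeSpan.scala further down. A hedged sketch of the expected shape; the statistic field names are hypothetical:

    val statistics: JsObject = Json.obj(
      "someTracingId" -> Json.obj("nodeCount" -> 345) // hypothetical per-layer statistics
    )
    annotationService.updateStatistics(annotationId, statistics) // layers missing in postgres are skipped silently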
diff --git a/app/models/annotation/AnnotationStore.scala b/app/models/annotation/AnnotationStore.scala
index 14c0f6d1c42..eb55c072934 100755
--- a/app/models/annotation/AnnotationStore.scala
+++ b/app/models/annotation/AnnotationStore.scala
@@ -19,8 +19,6 @@ class AnnotationStore @Inject()(

  private val cacheTimeout = 60 minutes

-  case class StoredResult(result: Fox[Annotation], timestamp: Long = System.currentTimeMillis)
-
  def requestAnnotation(id: AnnotationIdentifier, user: Option[User])(implicit ctx: DBAccessContext): Fox[Annotation] =
    requestFromCache(id).getOrElse(requestFromHandler(id, user)).futureBox.recover {
      case e =>
@@ -54,10 +52,13 @@ class AnnotationStore @Inject()(
    temporaryAnnotationStore.insert(id.toUniqueString, annotation, Some(cacheTimeout))

  private def getFromCache(annotationId: AnnotationIdentifier): Option[Fox[Annotation]] =
-    temporaryAnnotationStore.find(annotationId.toUniqueString).map(Fox.successful(_))
+    temporaryAnnotationStore.get(annotationId.toUniqueString).map(Fox.successful(_))
+
+  def findInCache(annotationId: String): Box[Annotation] =
+    temporaryAnnotationStore.getAll.find(a => a._id.toString == annotationId)

  def findCachedByTracingId(tracingId: String): Box[Annotation] = {
-    val annotationOpt = temporaryAnnotationStore.findAll.find(a => a.annotationLayers.exists(_.tracingId == tracingId))
+    val annotationOpt = temporaryAnnotationStore.getAll.find(a => a.annotationLayers.exists(_.tracingId == tracingId))
    annotationOpt match {
      case Some(annotation) => Full(annotation)
      case None             => Empty
diff --git a/app/models/annotation/TracingDataSourceTemporaryStore.scala b/app/models/annotation/TracingDataSourceTemporaryStore.scala
index 4140bf22269..f73e88f6fd4 100644
--- a/app/models/annotation/TracingDataSourceTemporaryStore.scala
+++ b/app/models/annotation/TracingDataSourceTemporaryStore.scala
@@ -19,6 +19,6 @@ class TracingDataSourceTemporaryStore @Inject()(temporaryStore: TemporaryStore[S
    temporaryStore.insert(tracingId, dataSource, Some(timeOut))

  def find(tracingId: String): Option[DataSourceLike] =
-    temporaryStore.find(tracingId)
+    temporaryStore.get(tracingId)

}
diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala
index 4502ae0c0fd..30c85a586e7 100644
--- a/app/models/annotation/WKRemoteTracingStoreClient.scala
+++ b/app/models/annotation/WKRemoteTracingStoreClient.scala
@@ -3,9 +3,11 @@ package models.annotation
import java.io.File
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.io.ZipIO
+import com.scalableminds.util.objectid.ObjectId
import com.scalableminds.util.tools.Fox
import com.scalableminds.util.tools.Fox.bool2Fox
import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat}
+import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto
import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracings}
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracings}
import com.scalableminds.webknossos.datastore.models.VoxelSize
@@ -33,14 +35,17 @@ class WKRemoteTracingStoreClient(
    tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore)(implicit ec: ExecutionContext)
    extends LazyLogging {

-  def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}"
+  private def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}"

-  def getSkeletonTracing(annotationLayer: AnnotationLayer, version: Option[Long]): Fox[FetchedAnnotationLayer] = {
+  def getSkeletonTracing(annotationId: ObjectId,
+                         annotationLayer: AnnotationLayer,
+                         version: Option[Long]): Fox[FetchedAnnotationLayer] = {
    logger.debug("Called to get SkeletonTracing." + baseInfo)
    for {
      _ <- bool2Fox(annotationLayer.typ == AnnotationLayerType.Skeleton) ?~> "annotation.download.fetch.notSkeleton"
      skeletonTracing <- rpc(s"${tracingStore.url}/tracings/skeleton/${annotationLayer.tracingId}")
        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+        .addQueryString("annotationId" -> annotationId.toString)
        .addQueryStringOptional("version", version.map(_.toString))
        .withLongTimeout
        .getWithProtoResponse[SkeletonTracing](SkeletonTracing)
@@ -80,85 +85,83 @@ class WKRemoteTracingStoreClient(
      .postProtoWithJsonResponse[SkeletonTracings, List[Box[Option[String]]]](tracings)
  }

+  def saveAnnotationProto(annotationId: ObjectId, annotationProto: AnnotationProto): Fox[Unit] = {
+    logger.debug(
+      f"Called to save AnnotationProto $annotationId with layers ${annotationProto.annotationLayers.map(_.tracingId).mkString(",")}." + baseInfo)
+    rpc(s"${tracingStore.url}/tracings/annotation/save")
+      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+      .addQueryString("annotationId" -> annotationId.toString)
+      .postProto[AnnotationProto](annotationProto)
+  }
+
+  // Used in duplicate route. History and version are kept.
+  def duplicateAnnotation(annotationId: ObjectId,
+                          newAnnotationId: ObjectId,
+                          version: Option[Long],
+                          isFromTask: Boolean,
+                          datasetBoundingBox: Option[BoundingBox]): Fox[AnnotationProto] = {
+    logger.debug(s"Called to duplicate annotation $annotationId." + baseInfo)
+    rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/duplicate").withLongTimeout
+      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+      .addQueryString("newAnnotationId" -> newAnnotationId.toString)
+      .addQueryStringOptional("version", version.map(_.toString))
+      .addQueryStringOptional("datasetBoundingBox", datasetBoundingBox.map(_.toLiteral))
+      .addQueryString("isFromTask" -> isFromTask.toString)
+      .postWithProtoResponse[AnnotationProto]()(AnnotationProto)
+  }
+
+  // Used in task creation. History is dropped, new version will be zero.
  def duplicateSkeletonTracing(skeletonTracingId: String,
-                               versionString: Option[String] = None,
-                               isFromTask: Boolean = false,
                               editPosition: Option[Vec3Int] = None,
                               editRotation: Option[Vec3Double] = None,
-                               boundingBox: Option[BoundingBox] = None): Fox[String] = {
-    logger.debug("Called to duplicate SkeletonTracing." + baseInfo)
+                               boundingBox: Option[BoundingBox] = None): Fox[String] =
    rpc(s"${tracingStore.url}/tracings/skeleton/$skeletonTracingId/duplicate").withLongTimeout
      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-      .addQueryStringOptional("version", versionString)
      .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral))
      .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral))
      .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral))
-      .addQueryString("fromTask" -> isFromTask.toString)
-      .postWithJsonResponse[String]
-  }
+      .postWithJsonResponse[String]()

+  // Used in task creation. History is dropped, new version will be zero.
  def duplicateVolumeTracing(volumeTracingId: String,
-                             isFromTask: Boolean = false,
-                             datasetBoundingBox: Option[BoundingBox] = None,
                             magRestrictions: MagRestrictions = MagRestrictions.empty,
-                             downsample: Boolean = false,
                             editPosition: Option[Vec3Int] = None,
                             editRotation: Option[Vec3Double] = None,
-                             boundingBox: Option[BoundingBox] = None): Fox[String] = {
-    logger.debug(s"Called to duplicate volume tracing $volumeTracingId. $baseInfo")
+                             boundingBox: Option[BoundingBox] = None): Fox[String] =
    rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/duplicate").withLongTimeout
      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-      .addQueryString("fromTask" -> isFromTask.toString)
-      .addQueryStringOptional("minMag", magRestrictions.minStr)
-      .addQueryStringOptional("maxMag", magRestrictions.maxStr)
      .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral))
      .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral))
      .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral))
-      .addQueryString("downsample" -> downsample.toString)
-      .postJsonWithJsonResponse[Option[BoundingBox], String](datasetBoundingBox)
-  }
-
-  def addSegmentIndex(volumeTracingId: String, dryRun: Boolean): Fox[Unit] =
-    rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/addSegmentIndex").withLongTimeout
-      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-      .addQueryString("dryRun" -> dryRun.toString)
-      .silent
-      .post()
-      .map(_ => ())
-
-  def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = {
-    logger.debug("Called to merge SkeletonTracings by ids." + baseInfo)
-    rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromIds").withLongTimeout
-      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-      .addQueryString("persist" -> persistTracing.toString)
-      .postJsonWithJsonResponse[List[TracingSelector], String](tracingIds.map(TracingSelector(_)))
-  }
+      .addQueryStringOptional("minMag", magRestrictions.minStr)
+      .addQueryStringOptional("maxMag", magRestrictions.maxStr)
+      .postWithJsonResponse[String]()

-  def mergeVolumeTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = {
-    logger.debug("Called to merge VolumeTracings by ids." + baseInfo)
-    rpc(s"${tracingStore.url}/tracings/volume/mergedFromIds").withLongTimeout
+  def mergeAnnotationsByIds(annotationIds: List[String],
+                            newAnnotationId: ObjectId,
+                            toTemporaryStore: Boolean): Fox[AnnotationProto] = {
+    logger.debug(s"Called to merge ${annotationIds.length} annotations by ids." + baseInfo)
+    rpc(s"${tracingStore.url}/tracings/annotation/mergedFromIds").withLongTimeout
      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-      .addQueryString("persist" -> persistTracing.toString)
-      .postJsonWithJsonResponse[List[TracingSelector], String](tracingIds.map(TracingSelector(_)))
+      .addQueryString("toTemporaryStore" -> toTemporaryStore.toString)
+      .addQueryString("newAnnotationId" -> newAnnotationId.toString)
+      .postJsonWithProtoResponse[List[String], AnnotationProto](annotationIds)(AnnotationProto)
  }

-  def mergeSkeletonTracingsByContents(tracings: SkeletonTracings, persistTracing: Boolean): Fox[String] = {
+  def mergeSkeletonTracingsByContents(tracings: SkeletonTracings): Fox[String] = {
    logger.debug("Called to merge SkeletonTracings by contents." + baseInfo)
    rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromContents").withLongTimeout
      .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-      .addQueryString("persist" -> persistTracing.toString)
      .postProtoWithJsonResponse[SkeletonTracings, String](tracings)
  }

  def mergeVolumeTracingsByContents(tracings: VolumeTracings,
                                    dataSource: DataSourceLike,
-                                    initialData: List[Option[File]],
-                                    persistTracing: Boolean): Fox[String] = {
+                                    initialData: List[Option[File]]): Fox[String] = {
    logger.debug("Called to merge VolumeTracings by contents." + baseInfo)
    for {
      tracingId <- rpc(s"${tracingStore.url}/tracings/volume/mergedFromContents")
        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
-        .addQueryString("persist" -> persistTracing.toString)
        .postProtoWithJsonResponse[VolumeTracings, String](tracings)
      packedVolumeDataZips = packVolumeDataZips(initialData.flatten)
      _ = tracingDataSourceTemporaryStore.store(tracingId, dataSource)
@@ -174,11 +177,13 @@ class WKRemoteTracingStoreClient(
  def saveVolumeTracing(tracing: VolumeTracing,
                        initialData: Option[File] = None,
                        magRestrictions: MagRestrictions = MagRestrictions.empty,
-                        dataSource: Option[DataSourceLike] = None): Fox[String] = {
+                        dataSource: Option[DataSourceLike] = None,
+                        newTracingId: Option[String] = None): Fox[String] = {
    logger.debug("Called to create VolumeTracing." + baseInfo)
    for {
      tracingId <- rpc(s"${tracingStore.url}/tracings/volume/save")
        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+        .addQueryStringOptional("newTracingId", newTracingId)
        .postProtoWithJsonResponse[VolumeTracing, String](tracing)
      _ = dataSource.foreach(d => tracingDataSourceTemporaryStore.store(tracingId, d))
      _ <- initialData match {
@@ -194,8 +199,9 @@ class WKRemoteTracingStoreClient(
    } yield tracingId
  }

-  def getVolumeTracing(annotationLayer: AnnotationLayer,
-                       version: Option[Long] = None,
+  def getVolumeTracing(annotationId: ObjectId,
+                       annotationLayer: AnnotationLayer,
+                       version: Option[Long],
                       skipVolumeData: Boolean,
                       volumeDataZipFormat: VolumeDataZipFormat,
                       voxelSize: Option[VoxelSize]): Fox[FetchedAnnotationLayer] = {
@@ -205,12 +211,14 @@ class WKRemoteTracingStoreClient(
      tracingId = annotationLayer.tracingId
      tracing <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId")
        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+        .addQueryString("annotationId" -> annotationId.toString)
        .addQueryStringOptional("version", version.map(_.toString))
        .getWithProtoResponse[VolumeTracing](VolumeTracing)
      data <- Fox.runIf(!skipVolumeData) {
        rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout
          .addQueryString("token" -> RpcTokenHolder.webknossosToken)
          .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString)
+          .addQueryString("annotationId" -> annotationId.toString)
          .addQueryStringOptional("version", version.map(_.toString))
          .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral))
          .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString))
@@ -221,7 +229,6 @@ class WKRemoteTracingStoreClient(
  }

  def getVolumeData(tracingId: String,
-                    version: Option[Long] = None,
                    volumeDataZipFormat: VolumeDataZipFormat,
                    voxelSize: Option[VoxelSize]): Fox[Array[Byte]] = {
    logger.debug("Called to get volume data." + baseInfo)
@@ -229,11 +236,17 @@ class WKRemoteTracingStoreClient(
      data <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout
        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
        .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString)
-        .addQueryStringOptional("version", version.map(_.toString))
        .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral))
        .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString))
        .getWithBytesResponse
    } yield data
  }

+  def resetToBase(annotationId: ObjectId): Fox[Unit] =
+    for {
+      _ <- rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/resetToBase").withLongTimeout
+        .addQueryString("token" -> RpcTokenHolder.webknossosToken)
+        .post()
+    } yield ()
+
}
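Aside: a rough sketch of how the annotation-level duplicate RPC above is consumed (compare createAnnotationFor earlier in this diff); surrounding wiring such as tracingStoreService is assumed:

    for {
      client <- tracingStoreService.clientFor(dataset)
      duplicated <- client.duplicateAnnotation(annotationBaseId,
                                               initializingAnnotationId,
                                               version = None,
                                               isFromTask = false,
                                               datasetBoundingBox = None)
      // The returned AnnotationProto carries the freshly generated layer ids:
      layers = duplicated.annotationLayers.map(AnnotationLayer.fromProto).toList
    } yield layers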
diff --git a/app/models/annotation/handler/ProjectInformationHandler.scala b/app/models/annotation/handler/ProjectInformationHandler.scala
index 4d38c9c1758..8fddcb89828 100755
--- a/app/models/annotation/handler/ProjectInformationHandler.scala
+++ b/app/models/annotation/handler/ProjectInformationHandler.scala
@@ -29,7 +29,7 @@ class ProjectInformationHandler @Inject()(annotationDAO: AnnotationDAO,
      _ <- assertNonEmpty(annotations) ?~> "project.noAnnotations"
      datasetId <- annotations.headOption.map(_._dataset).toFox
      mergedAnnotation <- annotationMerger.mergeN(projectId,
-                                                  persistTracing = false,
+                                                  toTemporaryStore = true,
                                                  user._id,
                                                  datasetId,
                                                  project._team,
diff --git a/app/models/annotation/handler/TaskInformationHandler.scala b/app/models/annotation/handler/TaskInformationHandler.scala
index 1ba9f90ba7f..d2f8d155ebe 100755
--- a/app/models/annotation/handler/TaskInformationHandler.scala
+++ b/app/models/annotation/handler/TaskInformationHandler.scala
@@ -32,7 +32,7 @@ class TaskInformationHandler @Inject()(taskDAO: TaskDAO,
      project <- projectDAO.findOne(task._project)
      datasetId <- finishedAnnotations.headOption.map(_._dataset).toFox
      mergedAnnotation <- annotationMerger.mergeN(task._id,
-                                                  persistTracing = false,
+                                                  toTemporaryStore = true,
                                                  user._id,
                                                  datasetId,
                                                  project._team,
diff --git a/app/models/annotation/handler/TaskTypeInformationHandler.scala b/app/models/annotation/handler/TaskTypeInformationHandler.scala
index f51a3b22f55..95e18eb77d5 100755
--- a/app/models/annotation/handler/TaskTypeInformationHandler.scala
+++ b/app/models/annotation/handler/TaskTypeInformationHandler.scala
@@ -34,7 +34,7 @@ class TaskTypeInformationHandler @Inject()(taskTypeDAO: TaskTypeDAO,
      user <- userOpt ?~> "user.notAuthorised"
      datasetId <- finishedAnnotations.headOption.map(_._dataset).toFox
      mergedAnnotation <- annotationMerger.mergeN(taskTypeId,
-                                                  persistTracing = false,
+                                                  toTemporaryStore = true,
                                                  user._id,
                                                  datasetId,
                                                  taskType._team,
diff --git a/app/models/annotation/nml/NmlWriter.scala b/app/models/annotation/nml/NmlWriter.scala
index 3ea35670c17..218b2f2749a 100644
--- a/app/models/annotation/nml/NmlWriter.scala
+++ b/app/models/annotation/nml/NmlWriter.scala
@@ -13,7 +13,7 @@ import com.scalableminds.webknossos.datastore.models.VoxelSize
import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayerType, FetchedAnnotationLayer}
import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat
import com.sun.xml.txw2.output.IndentingXMLStreamWriter
-import models.annotation.Annotation
+import models.annotation.{Annotation, AnnotationLayerPrecedence}
import models.task.Task
import models.user.User

@@ -39,7 +39,7 @@ case class NmlParameters(
    editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto]
)

-class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
+class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits with AnnotationLayerPrecedence {
  private lazy val outputService = XMLOutputFactory.newInstance()

  def toNmlStream(name: String,
@@ -138,7 +138,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
                          datasetId: ObjectId,
                          voxelSize: Option[VoxelSize]): Fox[NmlParameters] =
    for {
-      parameterSourceAnnotationLayer <- selectLayerWithPrecedence(skeletonLayers, volumeLayers)
+      parameterSourceAnnotationLayer <- selectLayerWithPrecedenceFetched(skeletonLayers, volumeLayers)
      nmlParameters = parameterSourceAnnotationLayer.tracing match {
        case Left(s) =>
          NmlParameters(
@@ -179,15 +179,6 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
      }
    } yield nmlParameters

-  // If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway)
-  private def selectLayerWithPrecedence(skeletonLayers: List[FetchedAnnotationLayer],
-                                        volumeLayers: List[FetchedAnnotationLayer]): Fox[FetchedAnnotationLayer] =
-    if (skeletonLayers.nonEmpty) {
-      Fox.successful(skeletonLayers.minBy(_.tracingId))
-    } else if (volumeLayers.nonEmpty) {
-      Fox.successful(volumeLayers.minBy(_.tracingId))
-    } else Fox.failure("annotation.download.noLayers")
-
  private def writeParameters(parameters: NmlParameters)(implicit writer: XMLStreamWriter): Unit =
    Xml.withinElementSync("parameters") {
      Xml.withinElementSync("experiment") {
diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala
index 6a5acca330d..d050cfd7ef2 100644
--- a/app/models/task/TaskCreationService.scala
+++ b/app/models/task/TaskCreationService.scala
@@ -154,7 +154,12 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService,
        for {
          volumeTracingOpt <- baseAnnotation.volumeTracingId
          newVolumeTracingId <- volumeTracingOpt
-            .map(id => tracingStoreClient.duplicateVolumeTracing(id, magRestrictions = magRestrictions))
+            .map(
+              id =>
+                tracingStoreClient.duplicateVolumeTracing(id,
+                                                          editPosition = Some(params.editPosition),
+                                                          editRotation = Some(params.editRotation),
+                                                          magRestrictions = magRestrictions))
            .getOrElse(
              annotationService
                .createVolumeTracingBase(
@@ -434,13 +439,14 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService,
          .toList
      createAnnotationBaseResults: List[Fox[Unit]] = zipped.map(
        tuple =>
-          annotationService.createAnnotationBase(
+          annotationService.createAndSaveAnnotationBase(
            taskFox = tuple._3,
            requestingUser._id,
            skeletonTracingIdBox = tuple._2._1,
            volumeTracingIdBox = tuple._2._2,
            dataset._id,
-            description = tuple._1.map(_._1.description).openOr(None)
+            description = tuple._1.map(_._1.description).openOr(None),
+            tracingStoreClient
          ))
      warnings <- warnIfTeamHasNoAccess(fullTasks.map(_._1), dataset, requestingUser)
      zippedTasksAndAnnotations = taskObjects zip createAnnotationBaseResults
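Aside: per the TaskCreationService change above, a duplicated base volume tracing now also receives the task's editPosition/editRotation, mirroring what createVolumeTracingBase does for fresh tracings. A hedged sketch of the call; the id is illustrative:

    tracingStoreClient.duplicateVolumeTracing(baseVolumeTracingId, // hypothetical id
                                              magRestrictions = magRestrictions,
                                              editPosition = Some(params.editPosition),
                                              editRotation = Some(params.editRotation))
    // Per the comment in the client: history is dropped, the new version will be zero.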
diff --git a/app/models/user/time/TimeSpan.scala b/app/models/user/time/TimeSpan.scala
index e594ad2dd4b..ac5c53a9923 100644
--- a/app/models/user/time/TimeSpan.scala
+++ b/app/models/user/time/TimeSpan.scala
@@ -5,7 +5,7 @@ import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.schema.Tables._
import models.annotation.AnnotationState.AnnotationState
import models.annotation.AnnotationType.AnnotationType
-import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
+import play.api.libs.json.{JsObject, JsValue, Json}
import slick.lifted.Rep
import utils.sql.{SQLDAO, SqlClient, SqlToken}
import com.scalableminds.util.objectid.ObjectId
@@ -96,7 +96,7 @@ class TimeSpanDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext)
            AND a.state IN ${SqlToken.tupleFromList(annotationStates)}
            GROUP BY a._id, t._id, p.name
          )
-          SELECT ti._annotation, ti._task, ti.projectName, ti.timeSummed, JSON_AGG(al.statistics) AS layerStatistics
+          SELECT ti._annotation, ti._task, ti.projectName, ti.timeSummed, JSON_OBJECT_AGG(al.tracingId, al.statistics) AS layerStatistics
          FROM timeSummedPerAnnotation ti
          JOIN webknossos.annotation_layers al ON al._annotation = ti._annotation
          GROUP BY ti._annotation, ti._task, ti.projectName, ti.timeSummed
@@ -104,7 +104,7 @@ class TimeSpanDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext)
      """.as[(String, Option[String], Option[String], Long, String)]
    )
    parsed = tuples.map { t =>
-      val layerStats: JsArray = Json.parse(t._5).validate[JsArray].getOrElse(Json.arr())
+      val layerStats: JsObject = Json.parse(t._5).validate[JsObject].getOrElse(Json.obj())
      Json.obj(
        "annotation" -> t._1,
        "task" -> t._2,
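Aside: with JSON_OBJECT_AGG, layerStatistics turns from a JSON array into an object keyed by tracing id. A sketch of the two shapes; the statistic field names are illustrative only:

    // Before: [{"nodeCount": 345}, {"segmentCount": 12}]
    // After:  {"tracingIdA": {"nodeCount": 345}, "tracingIdB": {"segmentCount": 12}}
    val layerStats: JsObject = Json.parse(raw).validate[JsObject].getOrElse(Json.obj())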
mesh.notFound=Mesh could not be found mesh.write.failed=Failed to convert mesh info to json diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 76e22722a22..2e1372baf7a 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -122,11 +122,14 @@ PUT /datastores/:name # Tracingstores GET /tracingstore controllers.TracingStoreController.listOne() POST /tracingstores/:name/handleTracingUpdateReport controllers.WKRemoteTracingStoreController.handleTracingUpdateReport(name: String, key: String) +POST /tracingstores/:name/updateAnnotation controllers.WKRemoteTracingStoreController.updateAnnotation(name: String, key: String, annotationId: String) POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, key: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) +GET /tracingstores/:name/annotationId controllers.WKRemoteTracingStoreController.annotationIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataStoreUri/:datasetDirectoryName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetDirectoryName: String) +POST /tracingstores/:name/createTracing controllers.WKRemoteTracingStoreController.createTracing(name: String, key: String, annotationId: String, previousVersion: Long) # User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() @@ -144,25 +147,16 @@ PUT /annotations/:typ/:id/reset PATCH /annotations/:typ/:id/transfer controllers.AnnotationController.transfer(typ: String, id: String) PATCH /annotations/:typ/:id/editLockedState controllers.AnnotationController.editLockedState(typ: String, id: String, isLockedByOwner: Boolean) -GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Long) -PATCH /annotations/:id/makeHybrid controllers.AnnotationController.makeHybridWithoutType(id: String, fallbackLayerName: Option[String]) -PATCH /annotations/:id/downsample controllers.AnnotationController.downsampleWithoutType(id: String, tracingId: String) -PATCH /annotations/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayerWithoutType(id: String) -PATCH /annotations/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayerWithoutType(id: String, layerName: String) +GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) DELETE /annotations/:id controllers.AnnotationController.cancelWithoutType(id: String) POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: String, mergedTyp: String, mergedId: String) -GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) +GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, version: Option[Long], skipVolumeData: 
Option[Boolean], volumeDataZipFormat: Option[String]) POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: String) -PATCH /annotations/addSegmentIndicesToAll controllers.AnnotationController.addSegmentIndicesToAll(parallelBatchCount: Int, dryRun: Boolean, skipTracings: Option[String]) -GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Long) -PATCH /annotations/:typ/:id/makeHybrid controllers.AnnotationController.makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]) -PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) -PATCH /annotations/:typ/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayer(typ: String, id: String) -PATCH /annotations/:typ/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayer(typ: String, id: String, layerName: String) +GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) -GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) +GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) GET /annotations/source/:accessTokenOrId controllers.AnnotationPrivateLinkController.annotationSource(accessTokenOrId: String, userToken: Option[String]) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index 3ee48062b8a..c32b36c613e 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -26,7 +26,7 @@ GET /v8/tasks/:id co POST /v8/tasks controllers.LegacyApiController.createTaskV8() PUT /v8/tasks/:id controllers.LegacyApiController.updateTaskV8(id: String) GET /v8/projects/:id/tasks controllers.LegacyApiController.tasksForProjectV8(id: String, limit: Option[Int], pageNumber: Option[Int], includeTotalCount: Option[Boolean]) -GET /v8/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Long) +GET /v8/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Option[Long]) GET /v8/tasks/:id/annotations controllers.LegacyApiController.annotationsForTaskV8(id: String) -> /v8/ webknossos.latest.Routes @@ -40,7 +40,7 @@ GET /v7/tasks/:id co POST /v7/tasks controllers.LegacyApiController.createTaskV8() PUT /v7/tasks/:id controllers.LegacyApiController.updateTaskV8(id: String) GET /v7/projects/:id/tasks controllers.LegacyApiController.tasksForProjectV8(id: String, limit: Option[Int], pageNumber: Option[Int], includeTotalCount: Option[Boolean]) -GET /v7/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Long) +GET /v7/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Option[Long]) GET /v7/tasks/:id/annotations controllers.LegacyApiController.annotationsForTaskV8(id: String) # v7: support changes to v8 @@ -57,7 +57,7 @@ 
GET /v6/tasks/:id co POST /v6/tasks controllers.LegacyApiController.createTaskV8() PUT /v6/tasks/:id controllers.LegacyApiController.updateTaskV8(id: String) GET /v6/projects/:id/tasks controllers.LegacyApiController.tasksForProjectV8(id: String, limit: Option[Int], pageNumber: Option[Int], includeTotalCount: Option[Boolean]) -GET /v6/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Long) +GET /v6/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Option[Long]) GET /v6/tasks/:id/annotations controllers.LegacyApiController.annotationsForTaskV8(id: String) @@ -77,7 +77,7 @@ GET /v5/tasks/:id co POST /v5/tasks controllers.LegacyApiController.createTaskV8() PUT /v5/tasks/:id controllers.LegacyApiController.updateTaskV8(id: String) GET /v5/projects/:id/tasks controllers.LegacyApiController.tasksForProjectV8(id: String, limit: Option[Int], pageNumber: Option[Int], includeTotalCount: Option[Boolean]) -GET /v5/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Long) +GET /v5/annotations/:id/info controllers.LegacyApiController.annotationInfoV8(id: String, timestamp: Option[Long]) GET /v5/tasks/:id/annotations controllers.LegacyApiController.annotationsForTaskV8(id: String) # v5: support changes to v7 diff --git a/docker-compose.yml b/docker-compose.yml index 879f777e4f7..84a737918ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -269,11 +269,11 @@ services: # FossilDB fossildb: - image: scalableminds/fossildb:master__484 + image: scalableminds/fossildb:master__504 command: - fossildb - -c - - skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate + - skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates user: ${USER_UID:-fossildb}:${USER_GID:-fossildb} fossildb-persisted: diff --git a/fossildb/run.sh b/fossildb/run.sh index 55853d3b302..53f56832c2f 100755 --- a/fossildb/run.sh +++ b/fossildb/run.sh @@ -14,7 +14,6 @@ if [ ! -f "$JAR" ] || [ ! "$CURRENT_VERSION" == "$VERSION" ]; then wget -q --show-progress -O "$JAR" "$URL" fi -# Note that the editableMappings column is no longer used by wk. Still here for backwards compatibility. 
-COLLECTIONS="skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate" +COLLECTIONS="skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates" exec java -jar "$JAR" -c "$COLLECTIONS" -d "$FOSSILDB_HOME/data" -b "$FOSSILDB_HOME/backup" diff --git a/fossildb/version b/fossildb/version index db7a480479e..50140e35363 100644 --- a/fossildb/version +++ b/fossildb/version @@ -1 +1 @@ -0.1.31 +0.1.33 diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index e499afbfb44..5fe11e13db5 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1,67 +1,68 @@ import ResumableJS from "resumablejs"; import _ from "lodash"; import dayjs from "dayjs"; -import type { - APIAnnotation, - APIAnnotationInfo, - APIAnnotationType, - APIAnnotationVisibility, - APIBuildInfo, - APIConnectomeFile, - APIDataSource, - APIDataStore, - APIDataset, - APIDataSourceId, - APIFeatureToggles, - APIHistogramData, - APIMapping, - APIMaybeUnimportedDataset, - APIMeshFile, - APIAvailableTasksReport, - APIOrganization, - APIOrganizationCompact, - APIProject, - APIProjectCreator, - APIProjectProgressReport, - APIProjectUpdater, - APIProjectWithStatus, - APIPublication, - APIMagRestrictions, - APIScript, - APIScriptCreator, - APIScriptUpdater, - APITaskType, - APITeam, - APITimeInterval, - APITimeTrackingPerAnnotation, - APITimeTrackingSpan, - APITracingStore, - APIUpdateActionBatch, - APIUser, - APIUserLoggedTime, - APIUserTheme, - AnnotationLayerDescriptor, - AnnotationViewConfiguration, - EditableLayerProperties, - ExperienceDomainList, - ServerTracing, - TracingType, - ServerEditableMapping, - APICompoundType, - ZarrPrivateLink, - VoxelyticsWorkflowReport, - VoxelyticsChunkStatistics, - ShortLink, - VoxelyticsWorkflowListing, - APIPricingPlanStatus, - VoxelyticsLogLine, - APIUserCompact, - APIDatasetCompact, - MaintenanceInfo, - AdditionalCoordinate, - LayerLink, - VoxelSize, - APITimeTrackingPerUser, +import { + type APIAnnotation, + type APIAnnotationInfo, + type APIAnnotationType, + type APIAnnotationVisibility, + type APIBuildInfo, + type APIConnectomeFile, + type APIDataSource, + type APIDataStore, + type APIDataset, + type APIDataSourceId, + type APIFeatureToggles, + type APIHistogramData, + type APIMapping, + type APIMaybeUnimportedDataset, + type APIMeshFile, + type APIAvailableTasksReport, + type APIOrganization, + type APIOrganizationCompact, + type APIProject, + type APIProjectCreator, + type APIProjectProgressReport, + type APIProjectUpdater, + type APIProjectWithStatus, + type APIPublication, + type APIMagRestrictions, + type APIScript, + type APIScriptCreator, + type APIScriptUpdater, + type APITaskType, + type APITeam, + type APITimeInterval, + type APITimeTrackingPerAnnotation, + type APITimeTrackingSpan, + type APITracingStore, + type APIUpdateActionBatch, + type APIUser, + type APIUserLoggedTime, + type APIUserTheme, + type AnnotationLayerDescriptor, + type AnnotationViewConfiguration, + type ExperienceDomainList, + type ServerTracing, + type TracingType, + type ServerEditableMapping, + type APICompoundType, + type ZarrPrivateLink, + type VoxelyticsWorkflowReport, + type VoxelyticsChunkStatistics, + type ShortLink, + type 
VoxelyticsWorkflowListing, + type APIPricingPlanStatus, + type VoxelyticsLogLine, + type APIUserCompact, + type APIDatasetCompact, + type MaintenanceInfo, + type AdditionalCoordinate, + type LayerLink, + type VoxelSize, + type APITimeTrackingPerUser, + AnnotationLayerEnum, + type APITracingStoreAnnotation, } from "types/api_flow_types"; import type { AnnotationTypeFilterEnum, LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; import Constants, { ControlModeEnum, AnnotationStateFilterEnum } from "oxalis/constants"; @@ -77,9 +78,9 @@ import type { NumberLike, } from "oxalis/store"; import { V3 } from "libs/mjs"; -import type { Versions } from "oxalis/view/version_view"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; import { + parseProtoAnnotation, parseProtoListOfLong, parseProtoTracing, serializeProtoListOfLong, @@ -91,7 +92,6 @@ import Toast from "libs/toast"; import * as Utils from "libs/utils"; import messages from "messages"; import window, { location } from "libs/window"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import type { DatasourceConfiguration } from "types/schemas/datasource.types"; import { doWithToken } from "./api/token"; import type BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; @@ -487,7 +487,6 @@ export function reOpenAnnotation( export type EditableAnnotation = { name: string; - description: string; visibility: APIAnnotationVisibility; tags: Array; viewConfiguration?: AnnotationViewConfiguration; @@ -530,25 +529,8 @@ export function setOthersMayEditForAnnotation( ); } -export function updateAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - tracingId: string, - layerProperties: EditableLayerProperties, -): Promise<{ - name: string | null | undefined; -}> { - return Request.sendJSONReceiveJSON( - `/api/annotations/${annotationType}/${annotationId}/editLayer/${tracingId}`, - { - method: "PATCH", - data: layerProperties, - }, - ); -} - type AnnotationLayerCreateDescriptor = { - typ: "Skeleton" | "Volume"; + typ: AnnotationLayerEnum; name: string | null | undefined; autoFallbackLayer?: boolean; fallbackLayerName?: string | null | undefined; @@ -556,33 +538,6 @@ type AnnotationLayerCreateDescriptor = { magRestrictions?: APIMagRestrictions | null | undefined; }; -export function addAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - newAnnotationLayer: AnnotationLayerCreateDescriptor, -): Promise { - return Request.sendJSONReceiveJSON( - `/api/annotations/${annotationType}/${annotationId}/addAnnotationLayer`, - { - method: "PATCH", - data: newAnnotationLayer, - }, - ); -} - -export function deleteAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - layerName: string, -): Promise { - return Request.receiveJSON( - `/api/annotations/${annotationType}/${annotationId}/deleteAnnotationLayer?layerName=${layerName}`, - { - method: "PATCH", - }, - ); -} - export function finishAnnotation( annotationId: string, annotationType: APIAnnotationType, @@ -639,7 +594,7 @@ export function duplicateAnnotation( }); } -export async function getAnnotationInformation( +export async function getUnversionedAnnotationInformation( annotationId: string, options: RequestOptions = {}, ): Promise { @@ -692,14 +647,14 @@ export function createExplorational( if (typ === "skeleton") { layers = [ { - typ: "Skeleton", + typ: AnnotationLayerEnum.Skeleton, name: "Skeleton", }, ]; } else if (typ === "volume") { 
layers = [ { - typ: "Volume", + typ: AnnotationLayerEnum.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -710,11 +665,11 @@ export function createExplorational( } else { layers = [ { - typ: "Skeleton", + typ: AnnotationLayerEnum.Skeleton, name: "Skeleton", }, { - typ: "Volume", + typ: AnnotationLayerEnum.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -729,12 +684,14 @@ export function createExplorational( export async function getTracingsForAnnotation( annotation: APIAnnotation, - versions: Versions = {}, + version?: number | null | undefined, ): Promise> { - const skeletonLayers = annotation.annotationLayers.filter((layer) => layer.typ === "Skeleton"); + const skeletonLayers = annotation.annotationLayers.filter( + (layer) => layer.typ === AnnotationLayerEnum.Skeleton, + ); const fullAnnotationLayers = await Promise.all( annotation.annotationLayers.map((layer) => - getTracingForAnnotationType(annotation, layer, versions), + getTracingForAnnotationType(annotation, layer, version), ), ); @@ -759,39 +716,28 @@ export async function acquireAnnotationMutex( return { canEdit, blockedByUser }; } -function extractVersion( - versions: Versions, - tracingId: string, - typ: "Volume" | "Skeleton", -): number | null | undefined { - if (typ === "Skeleton") { - return versions.skeleton; - } else if (versions.volumes != null) { - return versions.volumes[tracingId]; - } - - return null; -} - export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, - versions: Versions = {}, + version?: number | null | undefined, ): Promise { const { tracingId, typ } = annotationLayerDescriptor; - const version = extractVersion(versions, tracingId, typ); const tracingType = typ.toLowerCase() as "skeleton" | "volume"; - const possibleVersionString = version != null ? 
`&version=${version}` : ""; - const tracingArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${annotation.tracingStore.url}/tracings/${tracingType}/${tracingId}?token=${token}${possibleVersionString}`, + const params = new URLSearchParams({ annotationId: annotation.id }); + if (version != null) { + params.append("version", version.toString()); + } + const tracingArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${annotation.tracingStore.url}/tracings/${tracingType}/${tracingId}?${params}`, { headers: { Accept: "application/x-protobuf", }, }, - ), - ); + ); + }); const tracing = parseProtoTracing(tracingArrayBuffer, tracingType); if (!process.env.IS_TESTING) { @@ -815,8 +761,7 @@ export async function getTracingForAnnotationType( export function getUpdateActionLog( tracingStoreUrl: string, - tracingId: string, - versionedObjectType: SaveQueueType, + annotationId: string, oldestVersion?: number, newestVersion?: number, ): Promise> { @@ -830,23 +775,50 @@ export function getUpdateActionLog( params.append("newestVersion", newestVersion.toString()); } return Request.receiveJSON( - `${tracingStoreUrl}/tracings/${versionedObjectType}/${tracingId}/updateActionLog?${params}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/updateActionLog?${params}`, ); }); } -export function getNewestVersionForTracing( +export function getNewestVersionForAnnotation( tracingStoreUrl: string, - tracingId: string, - tracingType: SaveQueueType, + annotationId: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${tracingStoreUrl}/tracings/${tracingType}/${tracingId}/newestVersion?token=${token}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/newestVersion?token=${token}`, ).then((obj) => obj.version), ); } +export async function getAnnotationProto( + tracingStoreUrl: string, + annotationId: string, + version?: number | null | undefined, +): Promise { + const params = new URLSearchParams(); + if (version != null) { + params.append("version", version.toString()); + } + const annotationArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/annotation/${annotationId}?${params}`, + { + headers: { + Accept: "application/x-protobuf", + }, + }, + ); + }); + const annotationProto = parseProtoAnnotation(annotationArrayBuffer); + if (!process.env.IS_TESTING) { + // Log to console as the decoded annotationProto is hard to inspect in the devtools otherwise. + console.log("Parsed protobuf annotation:", annotationProto); + } + return annotationProto; +} + export function hasSegmentIndexInDataStore( dataStoreUrl: string, datasetDirectoryName: string, @@ -894,6 +866,7 @@ export async function importVolumeTracing( tracing: Tracing, volumeTracing: VolumeTracing, dataFile: File, + version: number, ): Promise { return doWithToken((token) => Request.sendMultipartFormReceiveJSON( @@ -901,24 +874,13 @@ export async function importVolumeTracing( { data: { dataFile, - currentVersion: volumeTracing.version, + currentVersion: version, }, }, ), ); } -export function convertToHybridTracing( - annotationId: string, - fallbackLayerName: string | null | undefined, -): Promise { - return Request.receiveJSON(`/api/annotations/Explorational/${annotationId}/makeHybrid`, { - method: "PATCH", - // @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ method: "PATCH"; fallbackLayer... 
Remove this comment to see the full error message - fallbackLayerName, - }); -} - export async function downloadWithFilename(downloadUrl: string) { const link = document.createElement("a"); link.href = downloadUrl; @@ -932,16 +894,14 @@ export async function downloadAnnotation( annotationId: string, annotationType: APIAnnotationType, showVolumeFallbackDownloadWarning: boolean = false, - versions: Versions = {}, + version: number | null | undefined = null, downloadFileFormat: "zarr3" | "wkw" | "nml" = "wkw", includeVolumeData: boolean = true, ) { - const searchParams = new URLSearchParams(); - Object.entries(versions).forEach(([key, val]) => { - if (val != null) { - searchParams.append(`${key}Version`, val.toString()); - } - }); + const params = new URLSearchParams(); + if (version != null) { + params.append("version", version.toString()); + } if (includeVolumeData && showVolumeFallbackDownloadWarning) { Toast.info(messages["annotation.no_fallback_data_included"], { @@ -949,35 +909,20 @@ export async function downloadAnnotation( }); } if (!includeVolumeData) { - searchParams.append("skipVolumeData", "true"); + params.append("skipVolumeData", "true"); } else { if (downloadFileFormat === "nml") { throw new Error( "Cannot download annotation with nml-only format while includeVolumeData is true", ); } - searchParams.append("volumeDataZipFormat", downloadFileFormat); + params.append("volumeDataZipFormat", downloadFileFormat); } - const downloadUrl = `/api/annotations/${annotationType}/${annotationId}/download?${searchParams}`; + const downloadUrl = `/api/annotations/${annotationType}/${annotationId}/download?${params}`; await downloadWithFilename(downloadUrl); } -// When the annotation is open, please use the corresponding method -// in api_latest.js. It will take care of saving the annotation and -// reloading it. -export async function downsampleSegmentation( - annotationId: string, - annotationType: APIAnnotationType, - tracingId: string, -): Promise { - await Request.receiveJSON( - `/api/annotations/${annotationType}/${annotationId}/downsample?tracingId=${tracingId}`, - { - method: "PATCH", - }, - ); -} // ### Datasets export async function getDatasets( isUnreported: boolean | null | undefined = null, @@ -1366,17 +1311,20 @@ export async function triggerDatasetClearCache( dataSourceId: APIDataSourceId, layerName?: string, ): Promise { - await doWithToken((token) => - Request.triggerRequest( - `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?token=${token}${ - layerName ? 
`&layerName=${layerName}` : "" - }`, + await doWithToken((token) => { + const params = new URLSearchParams(); + params.append("token", token); + if (layerName) { + params.append("layerName", layerName); + } + return Request.triggerRequest( + `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?${params}`, { host: datastoreHost, method: "POST", }, - ), - ); + ); + }); } export async function deleteDatasetOnDisk( @@ -1497,27 +1445,18 @@ export function fetchMapping( ); } -export function makeMappingEditable( - tracingStoreUrl: string, - tracingId: string, -): Promise { - return doWithToken((token) => - Request.receiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/makeMappingEditable?token=${token}`, - { - method: "POST", - }, - ), - ); -} - export function getEditableMappingInfo( tracingStoreUrl: string, tracingId: string, + annotationId: string, ): Promise { - return doWithToken((token) => - Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${tracingId}/info?token=${token}`), - ); + return doWithToken((token) => { + const params = new URLSearchParams({ + token, + annotationId: `${annotationId}`, + }); + return Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${tracingId}/info?${params}`); + }); } export function getPositionForSegmentInAgglomerate( @@ -1528,14 +1467,14 @@ export function getPositionForSegmentInAgglomerate( segmentId: number, ): Promise { return doWithToken(async (token) => { - const urlParams = new URLSearchParams({ + const params = new URLSearchParams({ token, segmentId: `${segmentId}`, }); const position = await Request.receiveJSON( `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${ dataSourceId.directoryName - }/layers/${layerName}/agglomerates/${mappingName}/positionForSegment?${urlParams.toString()}`, + }/layers/${layerName}/agglomerates/${mappingName}/positionForSegment?${params.toString()}`, ); return position; }); @@ -1995,10 +1934,13 @@ export async function getAgglomeratesForSegmentsFromDatastore, ): Promise { + const params = new URLSearchParams(); + const segmentIdBuffer = serializeProtoListOfLong(segmentIds); - const listArrayBuffer: ArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?token=${token}`, + const listArrayBuffer: ArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?${params}`, { method: "POST", body: segmentIdBuffer, @@ -2006,8 +1948,8 @@ export async function getAgglomeratesForSegmentsFromDatastore BigInt(el) @@ -2021,14 +1963,21 @@ export async function getAgglomeratesForSegmentsFromTracingstore, + annotationId: string, + version?: number | null | undefined, ): Promise { + const params = new URLSearchParams({ annotationId }); + if (version != null) { + params.append("version", version.toString()); + } const segmentIdBuffer = serializeProtoListOfLong( // The tracing store expects the ids to be sorted segmentIds.sort((a: T, b: T) => Number(a - b)), ); - const listArrayBuffer: ArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?token=${token}`, + const 
listArrayBuffer: ArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?${params}`, { method: "POST", body: segmentIdBuffer, @@ -2036,8 +1985,8 @@ export async function getAgglomeratesForSegmentsFromTracingstore { return doWithToken((token) => Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateSkeleton/${agglomerateId}?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateSkeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. // However, the server might send an error json instead of an array buffer. Therefore, don't use the webworker code. { @@ -2218,7 +2167,7 @@ export async function getEdgesForAgglomerateMinCut( ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphMinCut?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateGraphMinCut?token=${token}`, { data: { ...segmentsInfo, @@ -2249,7 +2198,7 @@ export async function getNeighborsForAgglomerateNode( ): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphNeighbors?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateGraphNeighbors?token=${token}`, { data: { ...segmentInfo, diff --git a/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx b/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx index b60bb4dbd7f..4dc0d246ead 100644 --- a/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx +++ b/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx @@ -6,7 +6,6 @@ import { formatMilliseconds } from "libs/format_utils"; import _ from "lodash"; import type { APITimeTrackingPerAnnotation } from "types/api_flow_types"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; -import { aggregateStatsForAllLayers } from "oxalis/model/accessors/annotation_accessor"; import type { AnnotationTypeFilterEnum, AnnotationStateFilterEnum } from "oxalis/constants"; type TimeTrackingDetailViewProps = { @@ -40,7 +39,7 @@ const renderRow = ( @@ -63,7 +62,7 @@ const renderRow = ( diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index 3ec3298767f..d956d15abc7 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -34,7 +34,7 @@ import { Vector3Input, Vector6Input } from "libs/vector_input"; import type { Vector3, Vector6 } from "oxalis/constants"; import { getActiveDatasetsOfMyOrganization, - getAnnotationInformation, + getUnversionedAnnotationInformation, getProjects, getScripts, getTaskTypes, @@ -485,17 +485,11 @@ function TaskCreateFormView({ taskId, history }: Props) { return Promise.resolve(); } - const annotationResponse = - (await tryToAwaitPromise( - getAnnotationInformation(value, { - showErrorToast: false, - }), - )) || - (await tryToAwaitPromise( - getAnnotationInformation(value, { - showErrorToast: false, - }), - )); + const annotationResponse = await tryToAwaitPromise( + getUnversionedAnnotationInformation(value, { + showErrorToast: false, + }), + ); if (annotationResponse?.dataSetName != 
null) { form.setFieldsValue({ diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 73e1b67726d..c0f1936425a 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -65,7 +65,6 @@ import { getVolumeDescriptors } from "oxalis/model/accessors/volumetracing_acces import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; import type { SearchProps } from "antd/lib/input"; -import { getCombinedStatsFromServerAnnotation } from "oxalis/model/accessors/annotation_accessor"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; const { Search } = Input; @@ -709,7 +708,10 @@ class ExplorativeAnnotationsView extends React.PureComponent { width: 150, render: (__: any, annotation: APIAnnotationInfo) => ( layer.tracingId), + (layer) => layer.stats, + )} asInfoBlock={false} withMargin={false} /> diff --git a/frontend/javascripts/messages.tsx b/frontend/javascripts/messages.tsx index bdfbd7dd051..98ef35199a2 100644 --- a/frontend/javascripts/messages.tsx +++ b/frontend/javascripts/messages.tsx @@ -130,8 +130,9 @@ A reload is necessary to return to a valid state.`, "There is no action that could be undone. However, if you want to restore an earlier version of this annotation, use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", "undo.no_redo": "There is no action that could be redone.", "undo.no_undo_during_proofread": - "Undo is not supported during proofreading yet. Please manually revert the last action you took.", - "undo.no_redo_during_proofread": "Redo is not supported during proofreading yet.", + "Undo is not supported during proofreading yet. Please use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", + "undo.no_redo_during_proofread": + "Redo is not supported during proofreading yet. Please use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", "undo.import_volume_tracing": "Importing a volume annotation cannot be undone. 
However, if you want to restore an earlier version of this annotation, use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", "download.wait": "Please wait...", diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index 924460157f8..e6435bca856 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -7,7 +7,6 @@ import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling import { type APICompoundType, APICompoundTypeEnum, type ElementClass } from "types/api_flow_types"; import { InputKeyboardNoLoop } from "libs/input"; import { M4x4, type Matrix4x4, V3, type Vector16 } from "libs/mjs"; -import type { Versions } from "oxalis/view/version_view"; import { addTreesAndGroupsAction, setActiveNodeAction, @@ -46,7 +45,6 @@ import { doWithToken, finishAnnotation, getMappingsForDatasetLayer, - downsampleSegmentation, sendAnalyticsEvent, } from "admin/admin_rest_api"; import { @@ -1114,7 +1112,7 @@ class TracingApi { newMaybeCompoundType: APICompoundType | null, newAnnotationId: string, newControlMode: ControlMode, - versions?: Versions, + version?: number | undefined | null, keepUrlState: boolean = false, ) { if (newControlMode === ControlModeEnum.VIEW) @@ -1133,7 +1131,7 @@ class TracingApi { type: newControlMode, }, false, - versions, + version, ); Store.dispatch(discardSaveQueuesAction()); Store.dispatch(wkReadyAction()); @@ -1510,27 +1508,6 @@ class TracingApi { this.setAnnotationTool(tool); } - /** - * Use this method to create a complete magnification pyramid by downsampling the lowest present mag (e.g., mag 1). - This method will save the current changes and then reload the page after the downsampling - has finished. - This function can only be used for non-tasks. - Note that invoking this method will not block the UI. Thus, user actions can be performed during the - downsampling. The caller should prohibit this (e.g., by showing a not-closable modal during the process). - */ - async downsampleSegmentation(volumeTracingId: string) { - const state = Store.getState(); - const { annotationId, annotationType } = state.tracing; - - if (state.task != null) { - throw new Error("Cannot downsample segmentation for a task."); - } - - await this.save(); - await downsampleSegmentation(annotationId, annotationType, volumeTracingId); - await this.hardReload(); - } - /** * Disables the saving for the current annotation. * WARNING: Cannot be undone. Only do this if you know what you are doing. diff --git a/frontend/javascripts/oxalis/api/wk_dev.ts b/frontend/javascripts/oxalis/api/wk_dev.ts index d3b5c4d7f16..bd8cb397adf 100644 --- a/frontend/javascripts/oxalis/api/wk_dev.ts +++ b/frontend/javascripts/oxalis/api/wk_dev.ts @@ -10,6 +10,7 @@ import _ from "lodash"; // Can be accessed via window.webknossos.DEV.flags. Only use this // for debugging or one off scripts. 
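// For instance, the logActions flag added below can be flipped from the browser
// console of a running webKnossos instance (a usage sketch; the flag is read by
// the action logger middleware, see action_logger_middleware.ts further down):
//   window.webknossos.DEV.flags.logActions = true; // start logging dispatched actions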
export const WkDevFlags = { + logActions: false, sam: { useLocalMask: true, }, diff --git a/frontend/javascripts/oxalis/controller.tsx b/frontend/javascripts/oxalis/controller.tsx index 0912a7babe9..537977d8e9d 100644 --- a/frontend/javascripts/oxalis/controller.tsx +++ b/frontend/javascripts/oxalis/controller.tsx @@ -90,14 +90,12 @@ class Controller extends React.PureComponent { tryFetchingModel() { this.props.setControllerStatus("loading"); // Preview a working annotation version if the showVersionRestore URL parameter is supplied - const versions = Utils.hasUrlParam("showVersionRestore") - ? { - skeleton: Utils.hasUrlParam("skeletonVersion") - ? Number.parseInt(Utils.getUrlParamValue("skeletonVersion")) - : 1, - } + const version = Utils.hasUrlParam("showVersionRestore") + ? Utils.hasUrlParam("version") + ? Number.parseInt(Utils.getUrlParamValue("version")) + : 1 : undefined; - Model.fetch(this.props.initialMaybeCompoundType, this.props.initialCommandType, true, versions) + Model.fetch(this.props.initialMaybeCompoundType, this.props.initialCommandType, true, version) .then(() => this.modelFetchDone()) .catch((error) => { this.props.setControllerStatus("failedLoading"); diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index da524d26c4f..807fc201a25 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -165,7 +165,6 @@ const defaultState: OxalisState = { boundingBox: null, createdTimestamp: 0, type: "readonly", - version: 0, tracingId: "", additionalAxes: [], }, @@ -178,24 +177,15 @@ const defaultState: OxalisState = { othersMayEdit: false, blockedByUser: null, annotationLayers: [], + version: 0, + earliestAccessibleVersion: 0, + stats: {}, organization: "", }, save: { - queue: { - skeleton: [], - volumes: {}, - mappings: {}, - }, - isBusyInfo: { - skeleton: false, - volumes: {}, - mappings: {}, - }, - lastSaveTimestamp: { - skeleton: 0, - volumes: {}, - mappings: {}, - }, + queue: [], + isBusy: false, + lastSaveTimestamp: 0, progressInfo: { processedActionCount: 0, totalActionCount: 0, diff --git a/frontend/javascripts/oxalis/geometries/skeleton.ts b/frontend/javascripts/oxalis/geometries/skeleton.ts index eb4af6c3f14..77941c66f29 100644 --- a/frontend/javascripts/oxalis/geometries/skeleton.ts +++ b/frontend/javascripts/oxalis/geometries/skeleton.ts @@ -316,7 +316,11 @@ class Skeleton { */ refresh(skeletonTracing: SkeletonTracing) { const state = Store.getState(); - const diff = cachedDiffTrees(this.prevTracing.trees, skeletonTracing.trees); + const diff = cachedDiffTrees( + skeletonTracing.tracingId, + this.prevTracing.trees, + skeletonTracing.trees, + ); for (const update of diff) { switch (update.name) { diff --git a/frontend/javascripts/oxalis/merger_mode.ts b/frontend/javascripts/oxalis/merger_mode.ts index d934f23596d..5a4176e4589 100644 --- a/frontend/javascripts/oxalis/merger_mode.ts +++ b/frontend/javascripts/oxalis/merger_mode.ts @@ -265,7 +265,11 @@ async function onUpdateNode(mergerModeState: MergerModeState, node: UpdateAction // If the segment of the node changed, it is like the node got deleted and a copy got created somewhere else. // Thus we use the onNodeDelete and onNodeCreate method to update the mapping. 
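// Since the unified save queue mixes update actions of all layers into one linear
// history, partial payloads like the one below now carry actionTracingId to
// identify their layer; a hypothetical example:
//   { nodeId: 7, treeId: 2, actionTracingId: "someSkeletonTracingId" }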
if (nodeSegmentMap[id] != null) { - await onDeleteNode(mergerModeState, { nodeId: id, treeId }, false); + await onDeleteNode( + mergerModeState, + { nodeId: id, treeId, actionTracingId: mergerModeState.prevTracing.tracingId }, + false, + ); } if (segmentId != null && segmentId > 0) { @@ -287,7 +291,11 @@ async function onUpdateNode(mergerModeState: MergerModeState, node: UpdateAction } function updateState(mergerModeState: MergerModeState, skeletonTracing: SkeletonTracing) { - const diff = cachedDiffTrees(mergerModeState.prevTracing.trees, skeletonTracing.trees); + const diff = cachedDiffTrees( + skeletonTracing.tracingId, + mergerModeState.prevTracing.trees, + skeletonTracing.trees, + ); for (const action of diff) { switch (action.name) { diff --git a/frontend/javascripts/oxalis/model.ts b/frontend/javascripts/oxalis/model.ts index f2d9d5db78b..07f84588d31 100644 --- a/frontend/javascripts/oxalis/model.ts +++ b/frontend/javascripts/oxalis/model.ts @@ -1,6 +1,5 @@ import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; -import type { Versions } from "oxalis/view/version_view"; import { getActiveSegmentationTracingLayer } from "oxalis/model/accessors/volumetracing_accessor"; import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; import { @@ -9,7 +8,6 @@ import { isLayerVisible, } from "oxalis/model/accessors/dataset_accessor"; import { getTotalSaveQueueLength } from "oxalis/model/reducers/save_reducer"; -import { isBusy } from "oxalis/model/accessors/save_accessor"; import { isDatasetAccessibleBySwitching } from "admin/admin_rest_api"; import { saveNowAction } from "oxalis/model/actions/save_actions"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -33,14 +31,14 @@ export class OxalisModel { initialMaybeCompoundType: APICompoundType | null, initialCommandType: TraceOrViewCommand, initialFetch: boolean, - versions?: Versions, + version?: number | undefined | null, ) { try { const initializationInformation = await initialize( initialMaybeCompoundType, initialCommandType, initialFetch, - versions, + version, ); if (initializationInformation) { @@ -283,8 +281,7 @@ export class OxalisModel { stateSaved() { const state = Store.getState(); - const storeStateSaved = - !isBusy(state.save.isBusyInfo) && getTotalSaveQueueLength(state.save.queue) === 0; + const storeStateSaved = !state.save.isBusy && getTotalSaveQueueLength(state.save.queue) === 0; const pushQueuesSaved = _.reduce( this.dataLayers, @@ -341,7 +338,7 @@ export class OxalisModel { // The dispatch of the saveNowAction IN the while loop is deliberate. // Otherwise if an update action is pushed to the save queue during the Utils.sleep, // the while loop would continue running until the next save would be triggered. 
- if (!isBusy(Store.getState().save.isBusyInfo)) { + if (!Store.getState().save.isBusy) { Store.dispatch(saveNowAction()); } diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 90f5a9f4903..5fdb9a14233 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -1,7 +1,5 @@ import _ from "lodash"; import type { OxalisState, Tracing } from "oxalis/store"; -import { getVolumeTracingById } from "./volumetracing_accessor"; -import type { APIAnnotationInfo } from "types/api_flow_types"; import type { EmptyObject } from "types/globals"; export function mayEditAnnotationProperties(state: OxalisState) { @@ -41,102 +39,48 @@ export type VolumeTracingStats = { segmentCount: number; }; -export type TracingStats = SkeletonTracingStats | VolumeTracingStats; -type TracingStatsHelper = { - treeCount?: number; - nodeCount?: number; - edgeCount?: number; - branchPointCount?: number; - segmentCount?: number; -}; - -// biome-ignore lint/complexity/noBannedTypes: {} should be avoided actually -export type CombinedTracingStats = (SkeletonTracingStats | {}) & (VolumeTracingStats | {}); +export type TracingStats = Record; -export function getStats( - tracing: Tracing, - saveQueueType: "skeleton" | "volume" | "mapping", - tracingId: string, -): TracingStats | null { - switch (saveQueueType) { - case "skeleton": { - if (!tracing.skeleton) { - return null; - } - const trees = tracing.skeleton.trees; - return { - treeCount: _.size(trees), - nodeCount: _.reduce(trees, (sum, tree) => sum + tree.nodes.size(), 0), - edgeCount: _.reduce(trees, (sum, tree) => sum + tree.edges.size(), 0), - branchPointCount: _.reduce(trees, (sum, tree) => sum + _.size(tree.branchPoints), 0), - }; - } - case "volume": { - const volumeTracing = getVolumeTracingById(tracing, tracingId); - return { - segmentCount: volumeTracing.segments.size(), - }; - } - default: - return null; +export function getStats(tracing: Tracing): TracingStats { + const stats: TracingStats = {}; + const { skeleton, volumes } = tracing; + for (const volumeTracing of volumes) { + stats[volumeTracing.tracingId] = { segmentCount: volumeTracing.segments.size() }; } -} - -export function getCombinedStats(tracing: Tracing): CombinedTracingStats { - const aggregatedStats: TracingStatsHelper = {}; - - if (tracing.skeleton) { - const skeletonStats = getStats(tracing, "skeleton", tracing.skeleton.tracingId); - if (skeletonStats && "treeCount" in skeletonStats) { - const { treeCount, nodeCount, edgeCount, branchPointCount } = skeletonStats; - aggregatedStats.treeCount = treeCount; - aggregatedStats.nodeCount = nodeCount; - aggregatedStats.edgeCount = edgeCount; - aggregatedStats.branchPointCount = branchPointCount; - } + if (skeleton) { + stats[skeleton.tracingId] = { + treeCount: _.size(skeleton.trees), + nodeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.nodes.size(), 0), + edgeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.edges.size(), 0), + branchPointCount: _.reduce(skeleton.trees, (sum, tree) => sum + _.size(tree.branchPoints), 0), + }; } + return stats; +} +export function getCreationTimestamp(tracing: Tracing) { + let timestamp = tracing.skeleton?.createdTimestamp; for (const volumeTracing of tracing.volumes) { - const volumeStats = getStats(tracing, "volume", volumeTracing.tracingId); - if (volumeStats && "segmentCount" in volumeStats) { - if 
(aggregatedStats.segmentCount == null) { - aggregatedStats.segmentCount = 0; - } - aggregatedStats.segmentCount += volumeStats.segmentCount; + if (!timestamp || volumeTracing.createdTimestamp < timestamp) { + timestamp = volumeTracing.createdTimestamp; } } - - return aggregatedStats; -} - -export function getCombinedStatsFromServerAnnotation( - annotation: APIAnnotationInfo, -): CombinedTracingStats { - return aggregateStatsForAllLayers( - annotation.annotationLayers.map((annotation) => annotation.stats), - ); + return timestamp || 0; } -export function aggregateStatsForAllLayers( - stats: Array, -): CombinedTracingStats { - const aggregatedStats: TracingStatsHelper = {}; - - for (const annotationLayerStats of stats) { - if ("treeCount" in annotationLayerStats) { - const { treeCount, nodeCount, edgeCount, branchPointCount } = annotationLayerStats; - aggregatedStats.treeCount = treeCount; - aggregatedStats.nodeCount = nodeCount; - aggregatedStats.edgeCount = edgeCount; - aggregatedStats.branchPointCount = branchPointCount; - } else if ("segmentCount" in annotationLayerStats) { - if (aggregatedStats.segmentCount == null) { - aggregatedStats.segmentCount = 0; - } - - aggregatedStats.segmentCount += annotationLayerStats.segmentCount; +export function getSkeletonStats(stats: TracingStats): SkeletonTracingStats | undefined { + for (const tracingId in stats) { + if ("treeCount" in stats[tracingId]) { + // TS thinks the return value could be EmptyObject even though + // we just checked that treeCount is a property. + return stats[tracingId] as SkeletonTracingStats; } } + return undefined; +} - return aggregatedStats; +export function getVolumeStats(stats: TracingStats): [string, VolumeTracingStats][] { + return Array.from(Object.entries(stats)).filter( + ([_tracingId, stat]) => "segmentCount" in stat, + ) as [string, VolumeTracingStats][]; } diff --git a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts deleted file mode 100644 index e35ed69805d..00000000000 --- a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { IsBusyInfo, OxalisState, SaveQueueEntry } from "oxalis/store"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; -import * as Utils from "libs/utils"; - -export function isBusy(isBusyInfo: IsBusyInfo): boolean { - return ( - isBusyInfo.skeleton || - Utils.values(isBusyInfo.volumes).some((el) => el) || - Utils.values(isBusyInfo.mappings).some((el) => el) - ); -} -export function selectQueue( - state: OxalisState, - saveQueueType: SaveQueueType, - tracingId: string, -): Array { - switch (saveQueueType) { - case "skeleton": - return state.save.queue.skeleton; - case "volume": - return state.save.queue.volumes[tracingId]; - case "mapping": - return state.save.queue.mappings[tracingId]; - default: - throw new Error(`Unknown save queue type: ${saveQueueType}`); - } -} diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index 657a18ca2e0..44eb0c995ef 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -1,10 +1,11 @@ import Maybe from "data.maybe"; import _ from "lodash"; -import type { - ServerTracing, - ServerSkeletonTracing, - APIAnnotation, - AnnotationLayerDescriptor, +import { + type ServerTracing, + type 
ServerSkeletonTracing, + type APIAnnotation, + type AnnotationLayerDescriptor, + AnnotationLayerEnum, } from "types/api_flow_types"; import type { Tracing, @@ -41,7 +42,7 @@ export function getSkeletonDescriptor( annotation: APIAnnotation, ): AnnotationLayerDescriptor | null | undefined { const skeletonLayers = annotation.annotationLayers.filter( - (descriptor) => descriptor.typ === "Skeleton", + (descriptor) => descriptor.typ === AnnotationLayerEnum.Skeleton, ); if (skeletonLayers.length > 0) { diff --git a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts index 1aa7ff5e470..7879b2f17de 100644 --- a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts @@ -1,5 +1,4 @@ import type { - APIAnnotation, APIAnnotationVisibility, APIDataLayer, APIDataset, @@ -8,6 +7,7 @@ import type { EditableLayerProperties, } from "types/api_flow_types"; import type { + Annotation, MappingType, UserBoundingBox, UserBoundingBoxWithoutId, @@ -20,10 +20,10 @@ import Deferred from "libs/async/deferred"; import type { AdditionalCoordinate } from "types/api_flow_types"; type InitializeAnnotationAction = ReturnType; -type SetAnnotationNameAction = ReturnType; +export type SetAnnotationNameAction = ReturnType; type SetAnnotationVisibilityAction = ReturnType; export type EditAnnotationLayerAction = ReturnType; -type SetAnnotationDescriptionAction = ReturnType; +export type SetAnnotationDescriptionAction = ReturnType; type SetAnnotationAllowUpdateAction = ReturnType; type SetBlockedByUserAction = ReturnType; type SetUserBoundingBoxesAction = ReturnType; @@ -95,7 +95,7 @@ export const AllUserBoundingBoxActions = [ "DELETE_USER_BOUNDING_BOX", "ADD_USER_BOUNDING_BOXES", ]; -export const initializeAnnotationAction = (annotation: APIAnnotation) => +export const initializeAnnotationAction = (annotation: Annotation) => ({ type: "INITIALIZE_ANNOTATION", annotation, diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index dca4997b9f6..94ecb79d769 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -1,11 +1,19 @@ import type { Dispatch } from "redux"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { + UpdateAction, + UpdateActionWithoutIsolationRequirement, + UpdateActionWithIsolationRequirement, +} from "oxalis/model/sagas/update_actions"; import { getUid } from "libs/uid_generator"; import Date from "libs/date"; import Deferred from "libs/async/deferred"; export type SaveQueueType = "skeleton" | "volume" | "mapping"; -export type PushSaveQueueTransaction = ReturnType; +export type PushSaveQueueTransaction = { + type: "PUSH_SAVE_QUEUE_TRANSACTION"; + items: UpdateAction[]; + transactionId: string; +}; type SaveNowAction = ReturnType; export type ShiftSaveQueueAction = ReturnType; type DiscardSaveQueuesAction = ReturnType; @@ -28,18 +36,25 @@ export type SaveAction = | RedoAction | DisableSavingAction; +// The action creators pushSaveQueueTransaction and pushSaveQueueTransactionIsolated +// are typed so that update actions that need isolation each end up in a transaction group of their own. +// From this point on, we can assume that the groups fulfil the isolation requirement. 
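// A usage sketch, assuming creators like updateNode, createEdge and revertToVersion
// from oxalis/model/sagas/update_actions (revertToVersion being one of the
// isolation-requiring actions):
//   Store.dispatch(pushSaveQueueTransaction([updateNode(node, treeId, tracingId), createEdge(treeId, source, target, tracingId)]));
//   Store.dispatch(pushSaveQueueTransactionIsolated(revertToVersion(version)));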
 export const pushSaveQueueTransaction = (
-  items: Array<UpdateAction>,
-  saveQueueType: SaveQueueType,
-  tracingId: string,
-  transactionId: string = getUid(),
-) =>
+  items: Array<UpdateActionWithoutIsolationRequirement>,
+): PushSaveQueueTransaction =>
   ({
     type: "PUSH_SAVE_QUEUE_TRANSACTION",
     items,
-    saveQueueType,
-    tracingId,
-    transactionId,
+    transactionId: getUid(),
+  }) as const;
+
+export const pushSaveQueueTransactionIsolated = (
+  item: UpdateActionWithIsolationRequirement,
+): PushSaveQueueTransaction =>
+  ({
+    type: "PUSH_SAVE_QUEUE_TRANSACTION",
+    items: [item],
+    transactionId: getUid(),
   }) as const;
 
 export const saveNowAction = () =>
@@ -47,16 +62,10 @@ export const saveNowAction = () =>
     type: "SAVE_NOW",
   }) as const;
 
-export const shiftSaveQueueAction = (
-  count: number,
-  saveQueueType: SaveQueueType,
-  tracingId: string,
-) =>
+export const shiftSaveQueueAction = (count: number) =>
   ({
     type: "SHIFT_SAVE_QUEUE",
     count,
-    saveQueueType,
-    tracingId,
   }) as const;
 
 export const discardSaveQueuesAction = () =>
@@ -64,36 +73,22 @@ export const discardSaveQueuesAction = () =>
     type: "DISCARD_SAVE_QUEUES",
   }) as const;
 
-export const setSaveBusyAction = (
-  isBusy: boolean,
-  saveQueueType: SaveQueueType,
-  tracingId: string,
-) =>
+export const setSaveBusyAction = (isBusy: boolean) =>
   ({
     type: "SET_SAVE_BUSY",
     isBusy,
-    saveQueueType,
-    tracingId,
   }) as const;
 
-export const setLastSaveTimestampAction = (saveQueueType: SaveQueueType, tracingId: string) =>
+export const setLastSaveTimestampAction = () =>
   ({
     type: "SET_LAST_SAVE_TIMESTAMP",
     timestamp: Date.now(),
-    saveQueueType,
-    tracingId,
   }) as const;
 
-export const setVersionNumberAction = (
-  version: number,
-  saveQueueType: SaveQueueType,
-  tracingId: string,
-) =>
+export const setVersionNumberAction = (version: number) =>
   ({
     type: "SET_VERSION_NUMBER",
     version,
-    saveQueueType,
-    tracingId,
   }) as const;
 
 export const undoAction = (callback?: () => void) =>
diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts
index 8d39d019265..31ba39ac814 100644
--- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts
+++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts
@@ -306,7 +306,7 @@ export default class LayerRenderingManager {
       (storeState) => getSegmentsForLayer(storeState, this.name),
       (newSegments) => {
         const cuckoo = this.getCustomColorCuckooTable();
-        for (const updateAction of cachedDiffSegmentLists(prevSegments, newSegments)) {
+        for (const updateAction of cachedDiffSegmentLists(this.name, prevSegments, newSegments)) {
           if (
             updateAction.name === "updateSegment" ||
             updateAction.name === "createSegment" ||
diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts
index 894261633a5..4827ced6328 100644
--- a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts
+++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts
@@ -5,7 +5,7 @@ import { createDebouncedAbortableParameterlessCallable } from "libs/async/deboun
 import { call } from "redux-saga/effects";
 import Store from "oxalis/store";
 import { pushSaveQueueTransaction } from "../actions/save_actions";
-import type { UpdateAction } from "../sagas/update_actions";
+import type { UpdateActionWithoutIsolationRequirement } from "../sagas/update_actions";
 import { AsyncFifoResolver } from "libs/async/async_fifo_resolver";
 import { escalateErrorAction } from "../actions/actions";
@@ -17,6 +17,7 @@ const PUSH_DEBOUNCE_TIME = 1000;
 
 class PushQueue {
   cube: DataCube;
+  tracingId: string;
 
   // The pendingBuckets contains all buckets that should be:
   // - snapshotted,
@@ -34,15 +35,16 @@ class PushQueue {
 
   // Helper to ensure the Store's save queue is filled in the correct
   // order.
-  private fifoResolver = new AsyncFifoResolver<UpdateAction[]>();
+  private fifoResolver = new AsyncFifoResolver<UpdateActionWithoutIsolationRequirement[]>();
 
   // If the timestamp is defined, it encodes when the first bucket
   // was added to the PushQueue that will be part of the next (to be created)
   // transaction.
   private waitTimeStartTimeStamp: number | null = null;
 
-  constructor(cube: DataCube) {
+  constructor(cube: DataCube, tracingId: string) {
     this.cube = cube;
+    this.tracingId = tracingId;
     this.pendingBuckets = new Set();
   }
 
@@ -131,7 +133,7 @@ class PushQueue {
 
   push = createDebouncedAbortableParameterlessCallable(this.pushImpl, PUSH_DEBOUNCE_TIME, this);
 
-  async pushTransaction(batch: Array<DataBucket>): Promise<void> {
+  private async pushTransaction(batch: Array<DataBucket>): Promise<void> {
     /*
      * Create a transaction from the batch and push it into the save queue.
      */
@@ -152,7 +154,7 @@ class PushQueue {
       const items = await this.fifoResolver.orderedWaitFor(
         createCompressedUpdateBucketActions(batch),
       );
-      Store.dispatch(pushSaveQueueTransaction(items, "volume", this.cube.layerName));
+      Store.dispatch(pushSaveQueueTransaction(items));
 
       this.compressingBucketCount -= batch.length;
     } catch (error) {
diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts
index f0237087f41..c1e4662b854 100644
--- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts
+++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts
@@ -13,7 +13,7 @@ import {
   needsLocalHdf5Mapping,
 } from "oxalis/model/accessors/volumetracing_accessor";
 import { parseMaybe } from "libs/utils";
-import type { UpdateAction } from "oxalis/model/sagas/update_actions";
+import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions";
 import { updateBucket } from "oxalis/model/sagas/update_actions";
 import ByteArraysToLz4Base64Worker from "oxalis/workers/byte_arrays_to_lz4_base64.worker";
 import DecodeFourBitWorker from "oxalis/workers/decode_four_bit.worker";
@@ -124,7 +124,13 @@ export async function requestWithFallback(
   const requestUrl = shouldUseDataStore
     ? getDataStoreUrl(maybeVolumeTracing?.fallbackLayer)
     : getTracingStoreUrl();
-  const bucketBuffers = await requestFromStore(requestUrl, layerInfo, batch, maybeVolumeTracing);
+  const bucketBuffers = await requestFromStore(
+    requestUrl,
+    layerInfo,
+    batch,
+    maybeVolumeTracing,
+    maybeVolumeTracing != null ? state.tracing.annotationId : undefined,
+  );
   const missingBucketIndices = getNullIndices(bucketBuffers);
 
   // If buckets could not be found on the tracing store (e.g. this happens when the buckets
@@ -154,6 +160,7 @@ export async function requestWithFallback(
         layerInfo,
         fallbackBatch,
         maybeVolumeTracing,
+        maybeVolumeTracing != null ? state.tracing.annotationId : undefined,
         true,
       );
       return bucketBuffers.map((bucket, idx) => {
@@ -170,6 +177,7 @@ export async function requestFromStore(
   layerInfo: DataLayerType,
   batch: Array<BucketAddress>,
   maybeVolumeTracing: VolumeTracing | null | undefined,
+  maybeAnnotationId: string | undefined,
   isVolumeFallback: boolean = false,
 ): Promise<Array<Uint8Array | null>> {
   const state = Store.getState();
@@ -198,7 +206,7 @@ export async function requestFromStore(
   const magInfo = getMagInfo(layerInfo.resolutions);
   const version =
     !isVolumeFallback && isSegmentation && maybeVolumeTracing != null
-      ? maybeVolumeTracing.version
+      ? state.tracing.version
       : null;
   const bucketInfo = batch.map((zoomedAddress) =>
     createRequestBucketInfo(
@@ -213,8 +221,14 @@ export async function requestFromStore(
   try {
     return await doWithToken(async (token) => {
       const startingTime = window.performance.now();
+      const params = new URLSearchParams({
+        token,
+      });
+      if (maybeAnnotationId != null) {
+        params.append("annotationId", maybeAnnotationId);
+      }
       const { buffer: responseBuffer, headers } =
-        await Request.sendJSONReceiveArraybufferWithHeaders(`${dataUrl}/data?token=${token}`, {
+        await Request.sendJSONReceiveArraybufferWithHeaders(`${dataUrl}/data?${params}`, {
           data: bucketInfo,
           timeout: REQUEST_TIMEOUT,
           showErrorToast: false,
@@ -273,7 +287,7 @@ function sliceBufferIntoPieces(
 
 export async function createCompressedUpdateBucketActions(
   batch: Array<DataBucket>,
-): Promise<UpdateAction[]> {
+): Promise<UpdateActionWithoutIsolationRequirement[]> {
   return _.flatten(
     await Promise.all(
       _.chunk(batch, COMPRESSION_BATCH_SIZE).map(async (batchSubset) => {
@@ -286,7 +300,7 @@ export async function createCompressedUpdateBucketActions(
         return compressedBase64Strings.map((compressedBase64, index) => {
           const bucket = batchSubset[index];
           const bucketInfo = createSendBucketInfo(bucket.zoomedAddress, bucket.cube.magInfo);
-          return updateBucket(bucketInfo, compressedBase64);
+          return updateBucket(bucketInfo, compressedBase64, bucket.getTracingId());
         });
       }),
     ),
diff --git a/frontend/javascripts/oxalis/model/data_layer.ts b/frontend/javascripts/oxalis/model/data_layer.ts
index ee53bf58b4e..c5fb2fe6c39 100644
--- a/frontend/javascripts/oxalis/model/data_layer.ts
+++ b/frontend/javascripts/oxalis/model/data_layer.ts
@@ -21,7 +21,12 @@ class DataLayer {
   fallbackLayerInfo: DataLayerType | null | undefined;
   isSegmentation: boolean;
 
-  constructor(layerInfo: DataLayerType, textureWidth: number, dataTextureCount: number) {
+  constructor(
+    layerInfo: DataLayerType,
+    textureWidth: number,
+    dataTextureCount: number,
+    tracingId: string,
+  ) {
     this.name = layerInfo.name;
     this.fallbackLayer =
       "fallbackLayer" in layerInfo && layerInfo.fallbackLayer != null
@@ -46,7 +51,7 @@ class DataLayer {
       this.name,
     );
     this.pullQueue = new PullQueue(this.cube, layerInfo.name, dataset.dataStore);
-    this.pushQueue = new PushQueue(this.cube);
+    this.pushQueue = new PushQueue(this.cube, tracingId);
     this.cube.initializeWithQueues(this.pullQueue, this.pushQueue);
 
     if (this.isSegmentation) {
diff --git a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts
index 7d444e1b9a3..45b027af15c 100644
--- a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts
+++ b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts
@@ -1,6 +1,7 @@
 import _ from "lodash";
 import type { Dispatch } from "redux";
 import type { Action } from "oxalis/model/actions/actions";
+import { WkDevFlags } from "oxalis/api/wk_dev";
 
 const MAX_ACTION_LOG_LENGTH = 250;
 let actionLog:
string[] = []; @@ -9,8 +10,6 @@ let actionLog: string[] = []; let lastActionName: string | null = null; let lastActionCount: number = 0; -const DEBUG_OUTPUT_FOR_ACTIONS = false; - const actionBlacklist = [ "ADD_TO_LAYER", "MOVE_FLYCAM", @@ -51,7 +50,7 @@ export default function actionLoggerMiddleware(): ( const overflowCount = Math.max(actionLog.length - MAX_ACTION_LOG_LENGTH, 0); actionLog = _.drop(actionLog, overflowCount); - if (DEBUG_OUTPUT_FOR_ACTIONS) { + if (WkDevFlags.logActions) { console.group(action.type); console.info("dispatching", action); let result = next(action); diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts index 6ab8bab4525..7504a03eded 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts @@ -3,11 +3,18 @@ import type { SaveQueueEntry } from "oxalis/store"; function removeAllButLastUpdateTracingAction(updateActionsBatches: Array) { // This part of the code removes all entries from the save queue that consist only of - // one updateTracing update action, except for the last one - const updateTracingOnlyBatches = updateActionsBatches.filter( - (batch) => batch.actions.length === 1 && batch.actions[0].name === "updateTracing", + // one update{Skeleton,Volume}Tracing update action, except for the last one + const updateSkeletonTracingOnlyBatches = updateActionsBatches.filter( + (batch) => batch.actions.length === 1 && batch.actions[0].name === "updateSkeletonTracing", + ); + const updateVolumeTracingOnlyBatches = updateActionsBatches.filter( + (batch) => batch.actions.length === 1 && batch.actions[0].name === "updateVolumeTracing", + ); + return _.without( + updateActionsBatches, + ...updateSkeletonTracingOnlyBatches.slice(0, -1), + ...updateVolumeTracingOnlyBatches.slice(0, -1), ); - return _.without(updateActionsBatches, ...updateTracingOnlyBatches.slice(0, -1)); } function removeAllButLastUpdateTdCameraAction(updateActionsBatches: Array) { diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts index cc7ee5af199..63efbb9106d 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts @@ -6,7 +6,7 @@ import _ from "lodash"; import type { SkeletonTracing, Tree, TreeGroup, TreeMap, VolumeTracing } from "oxalis/store"; import type { - UpdateAction, + UpdateActionWithoutIsolationRequirement, UpdateTreeVisibilityUpdateAction, } from "oxalis/model/sagas/update_actions"; import { updateTreeGroupVisibility, updateTreeVisibility } from "oxalis/model/sagas/update_actions"; @@ -137,9 +137,9 @@ function isCommonAncestorToggler( } export default function compactToggleActions( - updateActions: UpdateAction[], + updateActions: UpdateActionWithoutIsolationRequirement[], tracing: SkeletonTracing | VolumeTracing, -): UpdateAction[] { +): UpdateActionWithoutIsolationRequirement[] { if (tracing.type !== "skeleton") { // Don't do anything if this is not a skeleton tracing return updateActions; @@ -148,7 +148,7 @@ export default function compactToggleActions( const skeletonTracing = tracing; // Extract the toggleActions which we are interested in - const [toggleActions, remainingActions] = _.partition( + const [toggleActions, 
remainingActions] = _.partition( updateActions, (ua) => ua.name === "updateTreeVisibility", );
@@ -176,8 +176,8 @@ export default function compactToggleActions(
   // If less than 50% of the toggled trees are exceptions, we should use the compaction
   const shouldUseToggleGroup = exceptions.length < 0.5 * affectedTreeCount;
   const compactedToggleActions = [
-    updateTreeGroupVisibility(commonAncestor, commonVisibility),
-    ...exceptions.map((tree) => updateTreeVisibility(tree)),
+    updateTreeGroupVisibility(commonAncestor, commonVisibility, tracing.tracingId),
+    ...exceptions.map((tree) => updateTreeVisibility(tree, tracing.tracingId)),
   ];
   const finalToggleActions = shouldUseToggleGroup ? compactedToggleActions : toggleActions;
   return remainingActions.concat(finalToggleActions);
diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts
index b16e490e5e8..674c28c256c 100644
--- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts
+++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts
@@ -6,7 +6,7 @@ import type {
   DeleteEdgeUpdateAction,
   DeleteNodeUpdateAction,
   DeleteTreeUpdateAction,
-  UpdateAction,
+  UpdateActionWithoutIsolationRequirement,
 } from "oxalis/model/sagas/update_actions";
 import { moveTreeComponent } from "oxalis/model/sagas/update_actions";
 import compactToggleActions from "oxalis/model/helpers/compaction/compact_toggle_actions";
@@ -17,7 +17,7 @@ function cantor(a: number, b: number): number {
   return 0.5 * (a + b) * (a + b + 1) + b;
 }
 
-function compactMovedNodesAndEdges(updateActions: Array<UpdateAction>) {
+function compactMovedNodesAndEdges(updateActions: Array<UpdateActionWithoutIsolationRequirement>) {
   // This function detects tree merges and splits.
   // It does so by identifying nodes and edges that were deleted in one tree only to be created
   // in another tree again afterwards.
@@ -78,6 +78,7 @@ function compactMovedNodesAndEdges(updateActions: Array<UpdateAction>) {
   // Create a moveTreeComponent update action for each of the groups and insert it at the right spot
   for (const movedPairings of _.values(groupedMovedNodesAndEdges)) {
+    const actionTracingId = movedPairings[0][1].value.actionTracingId;
     const oldTreeId = movedPairings[0][1].value.treeId;
     const newTreeId = movedPairings[0][0].value.treeId;
     // This could be done with a .filter(...).map(...), but flow cannot comprehend that
@@ -105,18 +106,18 @@ function compactMovedNodesAndEdges(updateActions: Array<UpdateAction>) {
       compactedActions.splice(
         createTreeUAIndex + 1,
         0,
-        moveTreeComponent(oldTreeId, newTreeId, nodeIds),
+        moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId),
       );
     } else if (deleteTreeUAIndex > -1) {
       // Insert before the deleteTreeUA
       compactedActions.splice(
         deleteTreeUAIndex,
         0,
-        moveTreeComponent(oldTreeId, newTreeId, nodeIds),
+        moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId),
       );
     } else {
       // Insert in front
-      compactedActions.unshift(moveTreeComponent(oldTreeId, newTreeId, nodeIds));
+      compactedActions.unshift(moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId));
     }
 
     // Remove the original create/delete update actions of the moved nodes and edges.
@@ -135,7 +136,7 @@ function compactMovedNodesAndEdges(updateActions: Array<UpdateAction>) {
   return compactedActions;
 }
 
-function compactDeletedTrees(updateActions: Array<UpdateAction>) {
+function compactDeletedTrees(updateActions: Array<UpdateActionWithoutIsolationRequirement>) {
   // This function detects deleted trees.
   // Instead of sending deleteNode/deleteEdge update actions for all nodes of a deleted tree,
   // just one deleteTree update action is sufficient for the server to delete the tree.
@@ -155,9 +156,9 @@ function compactDeletedTrees(updateActions: Array<UpdateAction>) {
 }
 
 export default function compactUpdateActions(
-  updateActions: Array<UpdateAction>,
+  updateActions: Array<UpdateActionWithoutIsolationRequirement>,
   tracing: SkeletonTracing | VolumeTracing,
-): Array<UpdateAction> {
+): Array<UpdateActionWithoutIsolationRequirement> {
   return compactToggleActions(
     compactDeletedTrees(compactMovedNodesAndEdges(updateActions)),
     tracing,
diff --git a/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts b/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts
index be73a5c3161..6bb1d0d26e1 100644
--- a/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts
+++ b/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts
@@ -15,7 +15,7 @@ export default function generateDummyTrees(
   function generateDummyTree(): ServerSkeletonTracingTree {
     const nodes = [];
     const edges = [];
-    let counter = -1;
+    let counter = 0;
     const initialNodeId = currentNewNodeId;
 
     while (counter++ < nodeCount) {
@@ -43,7 +43,7 @@ export default function generateDummyTrees(
 
     counter = 0;
 
-    while (counter++ < nodeCount) {
+    while (counter++ < nodeCount - 1) {
       edges.push({
         source: initialNodeId + counter,
         target: initialNodeId + counter - 1,
diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts
index cd3430779d9..64cd2456e71 100644
--- a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts
+++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts
@@ -1,9 +1,11 @@
 import { Root } from "protobufjs/light";
-import type { ServerTracing } from "types/api_flow_types";
+import type { APITracingStoreAnnotation, ServerTracing } from "types/api_flow_types";
 // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'SkeletonTracing.proto' or its ... Remove this comment to see the full error message
 import SkeletonTracingProto from "SkeletonTracing.proto";
 // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'VolumeTracing.proto' or its co... Remove this comment to see the full error message
 import VolumeTracingProto from "VolumeTracing.proto";
+// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'Annotation.proto' or its co...
+import AnnotationProto from "Annotation.proto";
 // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'ListOfLong.proto' or its co...
Remove this comment to see the full error message import ListOfLongProto from "ListOfLong.proto"; import { isBigInt } from "libs/utils"; @@ -25,12 +27,14 @@ export function parseProtoTracing( const protoRoot = Root.fromJSON(PROTO_FILES[annotationType]); const messageType = protoRoot.lookupType(PROTO_TYPES[annotationType]); const message = messageType.decode(new Uint8Array(tracingArrayBuffer)); - return messageType.toObject(message, { + const tracing = messageType.toObject(message, { arrays: true, objects: true, enums: String, longs: Number, }) as ServerTracing; + delete tracing.version; + return tracing; } export function serializeProtoListOfLong( @@ -64,4 +68,16 @@ export function parseProtoListOfLong( longs: Number, }).items; } + +export function parseProtoAnnotation(annotationArrayBuffer: ArrayBuffer): any { + const protoRoot = Root.fromJSON(AnnotationProto); + const messageType = protoRoot.lookupType(`${PROTO_PACKAGE}.AnnotationProto`); + const message = messageType.decode(new Uint8Array(annotationArrayBuffer)); + return messageType.toObject(message, { + arrays: true, + objects: true, + enums: String, + longs: Number, + }) as APITracingStoreAnnotation; +} export default {}; diff --git a/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts b/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts index 0bc85460aa5..11ea81b47e3 100644 --- a/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts @@ -6,7 +6,6 @@ import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import * as Utils from "libs/utils"; import { getDisplayedDataExtentInPlaneMode } from "oxalis/model/accessors/view_mode_accessor"; -import { convertServerAnnotationToFrontendAnnotation } from "oxalis/model/reducers/reducer_helpers"; import _ from "lodash"; import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; import { getMeshesForAdditionalCoordinates } from "../accessors/volumetracing_accessor"; @@ -75,8 +74,7 @@ const maybeAddAdditionalCoordinatesToMeshState = ( function AnnotationReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { case "INITIALIZE_ANNOTATION": { - const annotationInfo = convertServerAnnotationToFrontendAnnotation(action.annotation); - return updateTracing(state, annotationInfo); + return updateTracing(state, action.annotation); } case "SET_ANNOTATION_NAME": { diff --git a/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts b/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts index 1a069bbdd02..e013de440f4 100644 --- a/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts @@ -75,7 +75,6 @@ function ConnectomeReducer(state: OxalisState, action: Action): OxalisState { trees: {}, treeGroups: [], tracingId: "connectome-tracing-data", - version: 1, boundingBox: null, userBoundingBoxes: [], navigationList: { diff --git a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts index 7326ddd1ba8..9f4d556f8ee 100644 --- a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts @@ -84,7 +84,11 @@ export function convertPointToVecInBoundingBox(boundingBox: ServerBoundingBox): topLeft: 
Utils.point3ToVector3(boundingBox.topLeft), }; } -export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnotation): Annotation { +export function convertServerAnnotationToFrontendAnnotation( + annotation: APIAnnotation, + version: number, + earliestAccessibleVersion: number, +): Annotation { const { id: annotationId, visibility, @@ -93,6 +97,7 @@ export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnot name, typ: annotationType, tracingStore, + stats, owner, contributors, organization, @@ -106,6 +111,9 @@ export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnot restrictions, visibility, tags, + version, + earliestAccessibleVersion, + stats, description, name, annotationType, diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 7460f0e8de9..39c9c72edad 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -1,23 +1,12 @@ import _ from "lodash"; import update from "immutability-helper"; import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, SaveState, SaveQueueEntry } from "oxalis/store"; -import type { - SetVersionNumberAction, - SetLastSaveTimestampAction, - SaveQueueType, -} from "oxalis/model/actions/save_actions"; +import type { OxalisState, SaveState } from "oxalis/store"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; -import { getStats } from "oxalis/model/accessors/annotation_accessor"; +import { type TracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; -import { updateKey2 } from "oxalis/model/helpers/deep_update"; -import { - updateEditableMapping, - updateVolumeTracing, -} from "oxalis/model/reducers/volumetracing_reducer_helpers"; +import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import Date from "libs/date"; -import * as Utils from "libs/utils"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. 
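// (Aside: an illustrative sketch, not part of the actual diff. An update action is
// idempotent if applying it twice leaves the annotation in the same state as applying
// it once. createNode fails this test, which is why duplicated transactions must never
// survive in the save queue. The action shape and helper below are simplified and assumed:
//
//   const createNode = { name: "createNode", value: { id: 7, treeId: 1 } };
//   applyUpdateAction(tracing, createNode); // node 7 is created
//   applyUpdateAction(tracing, createNode); // replay: id 7 already exists, the tree corrupts
//
// An action like updateTdCamera, by contrast, merely overwrites state and can be repeated safely.)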
@@ -31,130 +20,28 @@ const NOT_IDEMPOTENT_ACTIONS = [ "deleteNode", ]; -type TracingDict = { - skeleton: V; - volumes: Record; - mappings: Record; -}; - -function updateTracingDict( - action: { saveQueueType: SaveQueueType; tracingId: string }, - oldDict: TracingDict, - newValue: V, -): TracingDict { - if (action.saveQueueType === "skeleton") { - return { ...oldDict, skeleton: newValue }; - } else if (action.saveQueueType === "volume") { - return { - ...oldDict, - volumes: { ...oldDict.volumes, [action.tracingId]: newValue }, - }; - } else if (action.saveQueueType === "mapping") { - return { - ...oldDict, - mappings: { ...oldDict.mappings, [action.tracingId]: newValue }, - }; - } - - return oldDict; -} - export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { - return ( - queueObj.skeleton.length + - _.sum( - Utils.values(queueObj.volumes).map((volumeQueue: SaveQueueEntry[]) => volumeQueue.length), - ) + - _.sum( - Utils.values(queueObj.mappings).map((mappingQueue: SaveQueueEntry[]) => mappingQueue.length), - ) - ); -} - -function updateVersion(state: OxalisState, action: SetVersionNumberAction) { - if (action.saveQueueType === "skeleton" && state.tracing.skeleton != null) { - return updateKey2(state, "tracing", "skeleton", { - version: action.version, - }); - } else if (action.saveQueueType === "volume") { - return updateVolumeTracing(state, action.tracingId, { - version: action.version, - }); - } else if (action.saveQueueType === "mapping") { - return updateEditableMapping(state, action.tracingId, { - version: action.version, - }); - } - - return state; -} - -function updateLastSaveTimestamp(state: OxalisState, action: SetLastSaveTimestampAction) { - if (action.saveQueueType === "skeleton") { - return updateKey2(state, "save", "lastSaveTimestamp", { - skeleton: action.timestamp, - }); - } else if (action.saveQueueType === "volume") { - const newVolumesDict = { - ...state.save.lastSaveTimestamp.volumes, - [action.tracingId]: action.timestamp, - }; - return updateKey2(state, "save", "lastSaveTimestamp", { - volumes: newVolumesDict, - }); - } else if (action.saveQueueType === "mapping") { - const newMappingsDict = { - ...state.save.lastSaveTimestamp.mappings, - [action.tracingId]: action.timestamp, - }; - return updateKey2(state, "save", "lastSaveTimestamp", { - mappings: newMappingsDict, - }); - } - - return state; + return queueObj.length; } function SaveReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { - case "INITIALIZE_VOLUMETRACING": { - // Set up empty save queue array for volume tracing - const newVolumesQueue = { ...state.save.queue.volumes, [action.tracing.id]: [] }; - return updateKey2(state, "save", "queue", { - volumes: newVolumesQueue, - }); - } - - case "INITIALIZE_EDITABLE_MAPPING": { - // Set up empty save queue array for editable mapping - const newMappingsQueue = { ...state.save.queue.mappings, [action.mapping.tracingId]: [] }; - return updateKey2(state, "save", "queue", { - mappings: newMappingsQueue, - }); - } - case "PUSH_SAVE_QUEUE_TRANSACTION": { - const { items, transactionId } = action; - if (items.length === 0) { - return state; - } - // Only report tracing statistics, if a "real" update to the tracing happened - const stats = _.some(action.items, (ua) => ua.name !== "updateTracing") - ? getStats(state.tracing, action.saveQueueType, action.tracingId) - : null; + // Use `dispatchedAction` to better distinguish this variable from + // update actions. 
+ const dispatchedAction = action; + const { items, transactionId } = dispatchedAction; + const stats: TracingStats = getStats(state.tracing); const { activeUser } = state; if (activeUser == null) { throw new Error("Tried to save something even though user is not logged in."); } - const updateActionChunks = _.chunk( - items, - MAXIMUM_ACTION_COUNT_PER_BATCH[action.saveQueueType], - ); + const updateActionChunks = _.chunk(items, MAXIMUM_ACTION_COUNT_PER_BATCH); const transactionGroupCount = updateActionChunks.length; const actionLogInfo = JSON.stringify(getActionLog().slice(-10)); - const oldQueue = selectQueue(state, action.saveQueueType, action.tracingId); + const oldQueue = state.save.queue; const newQueue = oldQueue.concat( updateActionChunks.map((actions, transactionGroupIndex) => ({ // Placeholder, the version number will be updated before sending to the server @@ -176,7 +63,6 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { // caught by the following check. If the bug appears again, we can investigate with more // details thanks to airbrake. if ( - action.saveQueueType === "skeleton" && oldQueue.length > 0 && newQueue.length > 0 && newQueue.at(-1)?.actions.some((action) => NOT_IDEMPOTENT_ACTIONS.includes(action.name)) && @@ -192,11 +78,10 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); } - const newQueueObj = updateTracingDict(action, state.save.queue, newQueue); return update(state, { save: { queue: { - $set: newQueueObj, + $set: newQueue, }, progressInfo: { totalActionCount: { @@ -211,7 +96,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { const { count } = action; if (count > 0) { - const queue = selectQueue(state, action.saveQueueType, action.tracingId); + const queue = state.save.queue; const processedQueueActionCount = _.sumBy( queue.slice(0, count), @@ -219,13 +104,12 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); const remainingQueue = queue.slice(count); - const newQueueObj = updateTracingDict(action, state.save.queue, remainingQueue); - const remainingQueueLength = getTotalSaveQueueLength(newQueueObj); + const remainingQueueLength = getTotalSaveQueueLength(remainingQueue); const resetCounter = remainingQueueLength === 0; return update(state, { save: { queue: { - $set: newQueueObj, + $set: remainingQueue, }, progressInfo: { // Reset progress counters if the queue is empty. 
Otherwise, @@ -248,11 +132,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { return update(state, { save: { queue: { - $set: { - skeleton: [], - volumes: _.mapValues(state.save.queue.volumes, () => []), - mappings: _.mapValues(state.save.queue.mappings, () => []), - }, + $set: [], }, progressInfo: { processedActionCount: { @@ -267,22 +147,23 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } case "SET_SAVE_BUSY": { - const newIsBusyInfo = updateTracingDict(action, state.save.isBusyInfo, action.isBusy); return update(state, { save: { - isBusyInfo: { - $set: newIsBusyInfo, + isBusy: { + $set: action.isBusy, }, }, }); } case "SET_LAST_SAVE_TIMESTAMP": { - return updateLastSaveTimestamp(state, action); + return updateKey2(state, "save", "lastSaveTimestamp", action.timestamp); } case "SET_VERSION_NUMBER": { - return updateVersion(state, action); + return updateKey(state, "tracing", { + version: action.version, + }); } case "DISABLE_SAVING": { diff --git a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts index 16b3c90e4f0..20ec6539289 100644 --- a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts @@ -106,7 +106,6 @@ function SkeletonTracingReducer(state: OxalisState, action: Action): OxalisState trees, treeGroups: action.tracing.treeGroups || [], tracingId: action.tracing.id, - version: action.tracing.version, boundingBox: convertServerBoundingBoxToFrontend(action.tracing.boundingBox), userBoundingBoxes, navigationList: { @@ -442,11 +441,7 @@ function SkeletonTracingReducer(state: OxalisState, action: Action): OxalisState return update(state, { tracing: { skeleton: { - $set: update(action.tracing, { - version: { - $set: skeletonTracing.version, - }, - }), + $set: action.tracing, }, }, }); diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts index ec4ea3835af..46d3bcefe84 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts @@ -262,7 +262,6 @@ export function serverVolumeToClientVolumeTracing(tracing: ServerVolumeTracing): contourList: [], largestSegmentId, tracingId: tracing.id, - version: tracing.version, boundingBox: convertServerBoundingBoxToFrontend(tracing.boundingBox), fallbackLayer: tracing.fallbackLayer, userBoundingBoxes, diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index c93961d49cd..255a47fbcfb 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -4,16 +4,14 @@ import type { Action } from "oxalis/model/actions/actions"; import { type EditAnnotationLayerAction, setAnnotationAllowUpdateAction, + type SetAnnotationDescriptionAction, setBlockedByUserAction, type SetOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; +import * as Utils from "libs/utils"; import type { EditableAnnotation } from "admin/admin_rest_api"; import type { ActionPattern } from "redux-saga/effects"; -import { - editAnnotation, - updateAnnotationLayer, - acquireAnnotationMutex, -} from "admin/admin_rest_api"; +import { editAnnotation, acquireAnnotationMutex } from 
"admin/admin_rest_api"; import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, @@ -47,12 +45,24 @@ import { determineLayout } from "oxalis/view/layouting/default_layout_configs"; import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layout_persistence"; import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; +import { pushSaveQueueTransaction } from "../actions/save_actions"; +import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; +import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; +import { ensureWkReady } from "./ready_sagas"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. */ const MAX_MAG_FOR_AGGLOMERATE_MAPPING = 16; +export function* pushAnnotationDescriptionUpdateAction(action: SetAnnotationDescriptionAction) { + const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); + if (!mayEdit) { + return; + } + yield* put(pushSaveQueueTransaction([updateMetadataOfAnnotation(action.description)])); +} + export function* pushAnnotationUpdateAsync(action: Action) { const tracing = yield* select((state) => state.tracing); const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); @@ -72,7 +82,6 @@ export function* pushAnnotationUpdateAsync(action: Action) { const editObject: Partial = { name: tracing.name, visibility: tracing.visibility, - description: tracing.description, viewConfiguration, }; try { @@ -103,19 +112,19 @@ export function* pushAnnotationUpdateAsync(action: Action) { function* pushAnnotationLayerUpdateAsync(action: EditAnnotationLayerAction): Saga { const { tracingId, layerProperties } = action; - const annotationId = yield* select((storeState) => storeState.tracing.annotationId); - const annotationType = yield* select((storeState) => storeState.tracing.annotationType); - yield* retry( - SETTINGS_MAX_RETRY_COUNT, - SETTINGS_RETRY_DELAY, - updateAnnotationLayer, - annotationId, - annotationType, - tracingId, - layerProperties, + yield* put( + pushSaveQueueTransaction([updateAnnotationLayerName(tracingId, layerProperties.name)]), ); } +export function* checkVersionRestoreParam(): Saga { + const showVersionRestore = yield* call(Utils.hasUrlParam, "showVersionRestore"); + + if (showVersionRestore) { + yield* put(setVersionRestoreVisibilityAction(true)); + } +} + function shouldDisplaySegmentationData(): boolean { const state = Store.getState(); const currentViewMode = state.temporaryConfiguration.viewMode; @@ -183,7 +192,7 @@ export function* warnAboutSegmentationZoom(): Saga { } } - yield* take("WK_READY"); + yield* call(ensureWkReady); // Wait before showing the initial warning. Due to initialization lag it may only be visible very briefly, otherwise. yield* delay(5000); yield* warnMaybe(); @@ -214,9 +223,11 @@ export function* watchAnnotationAsync(): Saga { // name, only the latest action is relevant. If `_takeEvery` was used, // all updates to the annotation name would be retried regularly, which // would also cause race conditions. 
- yield* takeLatest("SET_ANNOTATION_NAME", pushAnnotationUpdateAsync); - yield* takeLatest("SET_ANNOTATION_VISIBILITY", pushAnnotationUpdateAsync); - yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationUpdateAsync); + yield* takeLatest( + ["SET_ANNOTATION_NAME", "SET_ANNOTATION_VISIBILITY"], + pushAnnotationUpdateAsync, + ); + yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationDescriptionUpdateAction); yield* takeLatest( ((action: Action) => action.type === "UPDATE_LAYER_SETTING" && @@ -227,7 +238,7 @@ export function* watchAnnotationAsync(): Saga { } export function* acquireAnnotationMutexMaybe(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); const annotationId = yield* select((storeState) => storeState.tracing.annotationId); if (!allowUpdate) { @@ -334,4 +345,9 @@ export function* acquireAnnotationMutexMaybe(): Saga { } yield* takeEvery("SET_OTHERS_MAY_EDIT_FOR_ANNOTATION", reactToOthersMayEditChanges); } -export default [warnAboutSegmentationZoom, watchAnnotationAsync, acquireAnnotationMutexMaybe]; +export default [ + warnAboutSegmentationZoom, + watchAnnotationAsync, + acquireAnnotationMutexMaybe, + checkVersionRestoreParam, +]; diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts b/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts index 84f4e720c6e..480df8f7071 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts @@ -11,8 +11,10 @@ import { getNextTool } from "oxalis/model/reducers/reducer_helpers"; import { getToolClassForAnnotationTool } from "oxalis/controller/combinations/tool_controls"; import getSceneController from "oxalis/controller/scene_controller_provider"; import { AnnotationToolEnum, MeasurementTools } from "oxalis/constants"; +import { ensureWkReady } from "./ready_sagas"; + export function* watchToolDeselection(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); let previousTool = yield* select((state) => state.uiInformation.activeTool); while (true) { diff --git a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts index b10c681ba5c..9ba779b6de3 100644 --- a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts @@ -1,4 +1,4 @@ -import { call, put, take, takeEvery, takeLatest } from "typed-redux-saga"; +import { call, put, takeEvery, takeLatest } from "typed-redux-saga"; import { sum } from "lodash"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; @@ -23,6 +23,7 @@ import { type EnsureSegmentIndexIsLoadedAction, setLayerHasSegmentIndexAction, } from "../actions/dataset_actions"; +import { ensureWkReady } from "./ready_sagas"; export function* watchMaximumRenderableLayers(): Saga { function* warnMaybe(): Saga { @@ -148,7 +149,8 @@ export function* watchZ1Downsampling(): Saga { Toast.close("DOWNSAMPLING_CAUSES_BAD_QUALITY"); } } - yield* take("WK_READY"); + + yield* call(ensureWkReady); yield* call(maybeShowWarning); yield* takeLatest( ["ZOOM_IN", "ZOOM_OUT", "ZOOM_BY_DELTA", "SET_ZOOM_STEP", "SET_STORED_LAYOUTS"], diff --git a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts index ad9b3249cf6..9cee9a4ba36 100644 
--- a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts
+++ b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts
@@ -1,6 +1,6 @@
 import type { Saga } from "oxalis/model/sagas/effect-generators";
 import { select } from "oxalis/model/sagas/effect-generators";
-import { call, take, takeEvery, put } from "typed-redux-saga";
+import { call, takeEvery, put } from "typed-redux-saga";
 import {
   setHistogramDataForLayerAction,
   updateLayerSettingAction,
@@ -10,9 +10,10 @@ import { getHistogramForLayer } from "admin/admin_rest_api";
 import type DataLayer from "oxalis/model/data_layer";
 import { Model } from "oxalis/singletons";
 import type { Vector2 } from "oxalis/constants";
+import { ensureWkReady } from "./ready_sagas";
 
 export default function* loadHistogramDataSaga(): Saga<void> {
-  yield* take("WK_READY");
+  yield* call(ensureWkReady);
   yield* takeEvery("RELOAD_HISTOGRAM", reloadHistogramForLayer);
   const dataLayers: Array<DataLayer> = yield* call([Model, Model.getColorLayers]);
diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts
index 0ec9d30ab66..d26dabd183b 100644
--- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts
+++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts
@@ -73,6 +73,7 @@ import { fastDiffSetAndMap, sleep } from "libs/utils";
 import type { Action } from "../actions/actions";
 import type { ActionPattern } from "redux-saga/effects";
 import { listenToStoreProperty } from "../helpers/listener_helpers";
+import { ensureWkReady } from "./ready_sagas";
 
 type APIMappings = Record<string, APIMapping>;
 type Container<T> = { value: T };
@@ -96,7 +97,9 @@ const takeLatestMappingChange = (
       );
       const mapping = getMappingInfo(activeMappingByLayer, layerName);
 
-      console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource);
+      if (process.env.NODE_ENV !== "production") {
+        console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource);
+      }
 
       if (lastWatcherTask) {
         console.log("Cancel old bucket watcher");
@@ -137,7 +140,7 @@ export default function* watchActivatedMappings(): Saga<void> {
   };
   // Buffer actions since they might be dispatched before WK_READY
   const setMappingActionChannel = yield* actionChannel("SET_MAPPING");
-  yield* take("WK_READY");
+  yield* call(ensureWkReady);
   yield* takeLatest(setMappingActionChannel, handleSetMapping, oldActiveMappingByLayer);
   yield* takeEvery(
     "ENSURE_LAYER_MAPPINGS_ARE_LOADED",
@@ -360,7 +363,13 @@ function* handleSetMapping(
     return;
   }
 
-  if (showLoadingIndicator) {
+  const visibleSegmentationLayerName = yield* select(
+    (state) => getVisibleSegmentationLayer(state)?.name,
+  );
+  if (showLoadingIndicator && layerName === visibleSegmentationLayerName) {
+    // Only show the message if the mapping belongs to the currently visible
+    // segmentation layer. Otherwise, the message would stay as long as the
+    // actual layer is not visible.
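// (Aside: an illustrative trace of this guard with assumed layer names. For a mapping
// that belongs to a currently hidden layer, the toast is now skipped entirely:
//
//   layerName = "hidden-segmentation";
//   visibleSegmentationLayerName = "main-segmentation";
//   showLoadingIndicator && layerName === visibleSegmentationLayerName // false, no toast
// )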
message.loading({ content: "Activating Mapping", key: MAPPING_MESSAGE_KEY, @@ -455,6 +464,8 @@ function* updateLocalHdf5Mapping( annotation.tracingStore.url, editableMapping.tracingId, Array.from(newSegmentIds), + annotation.annotationId, + annotation.version, ) : yield* call( getAgglomeratesForSegmentsFromDatastore, diff --git a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index 2fdf47b3710..bc56aad1fa3 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -82,6 +82,7 @@ import type { FlycamAction } from "../actions/flycam_actions"; import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; import type { BufferGeometryWithInfo } from "oxalis/controller/segment_mesh_controller"; import { WkDevFlags } from "oxalis/api/wk_dev"; +import { ensureSceneControllerReady, ensureWkReady } from "./ready_sagas"; export const NO_LOD_MESH_INDEX = -1; const MAX_RETRY_COUNT = 5; @@ -673,7 +674,7 @@ function* _refreshMeshWithMap( // Avoid redundant fetches of mesh files for the same layer by // storing Deferreds per layer lazily. -const fetchDeferredsPerLayer: Record, unknown>> = {}; +let fetchDeferredsPerLayer: Record, unknown>> = {}; function* maybeFetchMeshFiles(action: MaybeFetchMeshFilesAction): Saga { const { segmentationLayer, dataset, mustRequest, autoActivate, callback } = action; @@ -1202,7 +1203,7 @@ export function* handleAdditionalCoordinateUpdate(): Saga { // We want to prevent iterating through all additional coordinates to adjust the mesh visibility, so we store the // previous additional coordinates in this method. Thus we have to catch SET_ADDITIONAL_COORDINATES actions in a // while-true loop and register this saga in the root saga instead of calling from the mesh saga. 
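// (Aside: a condensed sketch of the ensure* pattern that replaces the bare
// take("WK_READY") calls throughout this diff; see ready_sagas.ts further below.
// The module-level flag makes the helper safe for sagas that are started after
// the action has already been dispatched:
//
//   let isWkReady = false;
//   function* listenForWkReady() {
//     yield* takeEvery("WK_READY", () => { isWkReady = true; });
//   }
//   function* ensureWkReady() {
//     if (isWkReady) return;   // action was dispatched earlier: return immediately
//     yield* take("WK_READY"); // otherwise block until it arrives
//   }
// )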
- yield* take("WK_READY"); + yield* call(ensureWkReady); let previousAdditionalCoordinates = yield* select((state) => state.flycam.additionalCoordinates); const { segmentMeshController } = yield* call(getSceneController); @@ -1277,13 +1278,14 @@ function* handleBatchSegmentColorChange( } export default function* meshSaga(): Saga { + fetchDeferredsPerLayer = {}; // Buffer actions since they might be dispatched before WK_READY const loadAdHocMeshActionChannel = yield* actionChannel("LOAD_AD_HOC_MESH_ACTION"); const loadPrecomputedMeshActionChannel = yield* actionChannel("LOAD_PRECOMPUTED_MESH_ACTION"); const maybeFetchMeshFilesActionChannel = yield* actionChannel("MAYBE_FETCH_MESH_FILES"); - yield* take("SCENE_CONTROLLER_READY"); - yield* take("WK_READY"); + yield* call(ensureSceneControllerReady); + yield* call(ensureWkReady); yield* takeEvery(maybeFetchMeshFilesActionChannel, maybeFetchMeshFiles); yield* takeEvery(loadAdHocMeshActionChannel, loadAdHocMeshFromAction); yield* takeEvery(loadPrecomputedMeshActionChannel, loadPrecomputedMesh); diff --git a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts index 49e160331f1..d06315bedd6 100644 --- a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts @@ -8,7 +8,7 @@ import { } from "oxalis/model/bucket_data_handling/prefetch_strategy_plane"; import { getGlobalDataConnectionInfo } from "oxalis/model/data_connection_info"; import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { throttle, call, take } from "typed-redux-saga"; +import { throttle, call } from "typed-redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; import { getPosition, @@ -21,6 +21,7 @@ import { Model } from "oxalis/singletons"; import type { Vector3 } from "oxalis/constants"; import constants from "oxalis/constants"; import { WkDevFlags } from "oxalis/api/wk_dev"; +import { ensureWkReady } from "./ready_sagas"; const PREFETCH_THROTTLE_TIME = 50; const DIRECTION_VECTOR_SMOOTHER = 0.125; @@ -28,7 +29,8 @@ const prefetchStrategiesArbitrary = [new PrefetchStrategyArbitrary()]; const prefetchStrategiesPlane = [new PrefetchStrategySkeleton(), new PrefetchStrategyVolume()]; export function* watchDataRelevantChanges(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); + const previousProperties = {}; // Initiate the prefetching once and then only for data relevant changes yield* call(triggerDataPrefetching, previousProperties); diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 3839d8f48c3..8f38a2f68bc 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -33,14 +33,11 @@ import { getTreeNameForAgglomerateSkeleton, isSkeletonLayerTransformed, } from "oxalis/model/accessors/skeletontracing_accessor"; -import { - pushSaveQueueTransaction, - setVersionNumberAction, -} from "oxalis/model/actions/save_actions"; +import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; import { splitAgglomerate, mergeAgglomerate, - type UpdateAction, + type UpdateActionWithoutIsolationRequirement, } from "oxalis/model/sagas/update_actions"; import { Model, api, Store } from "oxalis/singletons"; import { @@ -62,7 +59,6 @@ import { getEdgesForAgglomerateMinCut, getNeighborsForAgglomerateNode, getPositionForSegmentInAgglomerate, - 
makeMappingEditable, } from "admin/admin_rest_api"; import { setMappingAction, setMappingNameAction } from "oxalis/model/actions/settings_actions"; import { getSegmentIdForPositionAsync } from "oxalis/controller/combinations/volume_handlers"; @@ -78,11 +74,12 @@ import { } from "oxalis/model/actions/annotation_actions"; import type { ActiveMappingInfo, Mapping, NumberLikeMap, Tree, VolumeTracing } from "oxalis/store"; import _ from "lodash"; -import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { AdditionalCoordinate, ServerEditableMapping } from "types/api_flow_types"; import { takeEveryUnlessBusy } from "./saga_helpers"; import type { Action } from "../actions/actions"; import { isBigInt, isNumberMap, SoftError } from "libs/utils"; import { getCurrentMag } from "../accessors/flycam_accessor"; +import { ensureWkReady } from "./ready_sagas"; function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { return function* (...args: any[]) { @@ -100,7 +97,8 @@ function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { export default function* proofreadRootSaga(): Saga { yield* take("INITIALIZE_SKELETONTRACING"); - yield* take("WK_READY"); + yield* call(ensureWkReady); + yield* takeEveryUnlessBusy( ["DELETE_EDGE", "MERGE_TREES", "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS"], runSagaAndCatchSoftError(handleSkeletonProofreadingAction), @@ -264,10 +262,12 @@ function* createEditableMapping(): Saga { * Returns the name of the editable mapping. This is not identical to the * name of the HDF5 mapping for which the editable mapping is about to be created. */ - const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); - // Save before making the mapping editable to make sure the correct mapping is activated in the backend - yield* call([Model, Model.ensureSavedState]); // Get volume tracing again to make sure the version is up to date + const volumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); + if (!volumeTracing || !volumeTracing.mappingName) { + // This should never occur, because the proofreading tool is only available when a volume tracing layer is active. + throw new Error("No active segmentation tracing layer. Cannot create editable mapping."); + } const upToDateVolumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); if (upToDateVolumeTracing == null) { throw new Error("No active segmentation tracing layer. Cannot create editable mapping."); @@ -275,13 +275,18 @@ function* createEditableMapping(): Saga { const volumeTracingId = upToDateVolumeTracing.tracingId; const layerName = volumeTracingId; - const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, volumeTracingId); - // The server increments the volume tracing's version by 1 when switching the mapping to an editable one - yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); - yield* put(setMappingNameAction(layerName, serverEditableMapping.mappingName, "HDF5")); + const baseMappingName = volumeTracing.mappingName; + yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); yield* put(setHasEditableMappingAction()); - yield* put(initializeEditableMappingAction(serverEditableMapping)); - return serverEditableMapping.mappingName; + // Ensure a saved state so that the mapping is locked and editable before doing the first proofreading operation. 
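// (Aside: a rough sketch of what the ensureSavedState call below guarantees, assuming
// the post-PR store shape where state.save.queue is a single array. Flushing the queue
// means the backend has already processed all pending update actions, so the mapping
// exists server-side and is locked before the first split/merge is enqueued:
//
//   yield* put(saveNowAction());
//   // conceptually resolves once:
//   //   state.save.queue.length === 0 && !state.save.isBusy
//
// The actual implementation of Model.ensureSavedState may differ in detail.)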
+ yield* call([Model, Model.ensureSavedState]); + const editableMapping: ServerEditableMapping = { + baseMappingName: baseMappingName, + tracingId: volumeTracingId, + createdTimestamp: Date.now(), + }; + yield* put(initializeEditableMappingAction(editableMapping)); + return volumeTracingId; } function* ensureHdf5MappingIsEnabled(layerName: string): Saga { @@ -387,12 +392,10 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { const sourceAgglomerateId = sourceInfo.agglomerateId; const targetAgglomerateId = targetInfo.agglomerateId; - const editableMappingId = volumeTracing.mappingName; - /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; if (action.type === "MERGE_TREES") { if (sourceAgglomerateId === targetAgglomerateId) { Toast.error("Segments that should be merged need to be in different agglomerates."); @@ -405,6 +408,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, + volumeTracingId, ), ); const mergedMapping = yield* call( @@ -429,6 +433,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, + volumeTracingId, ), ); } else if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS") { @@ -439,7 +444,6 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, - editableMappingId, volumeTracingId, sourceTree, items, @@ -453,7 +457,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" || action.type === "DELETE_EDGE") { @@ -527,10 +531,9 @@ function* performMinCut( sourceSegmentId: number, targetSegmentId: number, agglomerateFileMag: Vector3, - editableMappingId: string, volumeTracingId: string, sourceTree: Tree | null, - items: UpdateAction[], + items: UpdateActionWithoutIsolationRequirement[], ): Saga { if (sourceAgglomerateId !== targetAgglomerateId) { Toast.error( @@ -545,7 +548,7 @@ function* performMinCut( segmentId2: targetSegmentId, mag: agglomerateFileMag, agglomerateId: sourceAgglomerateId, - editableMappingId, + editableMappingId: volumeTracingId, }; const edgesToRemove = yield* call( @@ -581,7 +584,13 @@ function* performMinCut( edge.segmentId2, ); items.push( - splitAgglomerate(sourceAgglomerateId, edge.segmentId1, edge.segmentId2, agglomerateFileMag), + splitAgglomerate( + sourceAgglomerateId, + edge.segmentId1, + edge.segmentId2, + agglomerateFileMag, + volumeTracingId, + ), ); } @@ -593,10 +602,9 @@ function* performCutFromNeighbors( segmentId: number, segmentPosition: Vector3 | null, agglomerateFileMag: Vector3, - editableMappingId: string, volumeTracingId: string, sourceTree: Tree | null | undefined, - items: UpdateAction[], + items: UpdateActionWithoutIsolationRequirement[], ): Saga< { didCancel: false; neighborInfo: NeighborInfo } | { didCancel: true; neighborInfo?: null } > { @@ -605,7 +613,7 @@ function* performCutFromNeighbors( segmentId, mag: agglomerateFileMag, agglomerateId, - editableMappingId, + editableMappingId: volumeTracingId, }; const neighborInfo = yield* call( @@ -663,7 +671,13 @@ 
function* performCutFromNeighbors( } items.push( - splitAgglomerate(agglomerateId, edge.segmentId1, edge.segmentId2, agglomerateFileMag), + splitAgglomerate( + agglomerateId, + edge.segmentId1, + edge.segmentId2, + agglomerateFileMag, + volumeTracingId, + ), ); } @@ -714,7 +728,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; if (action.type === "PROOFREAD_MERGE") { if (sourceAgglomerateId === targetAgglomerateId) { @@ -729,6 +743,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, + volumeTracingId, ), ); @@ -767,7 +782,6 @@ function* handleProofreadMergeOrMinCut(action: Action) { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, - volumeTracing.mappingName, volumeTracingId, null, items, @@ -781,7 +795,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE") { @@ -918,14 +932,12 @@ function* handleProofreadCutFromNeighbors(action: Action) { const targetAgglomerateId = idInfos[0].agglomerateId; const targetSegmentId = idInfos[0].unmappedId; - const editableMappingId = volumeTracing.mappingName; - const targetAgglomerate = volumeTracing.segments.getNullable(Number(targetAgglomerateId)); /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; const { didCancel, neighborInfo } = yield* call( performCutFromNeighbors, @@ -933,7 +945,6 @@ function* handleProofreadCutFromNeighbors(action: Action) { targetSegmentId, targetPosition, agglomerateFileMag, - editableMappingId, volumeTracingId, action.tree, items, @@ -942,7 +953,7 @@ function* handleProofreadCutFromNeighbors(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); // Now that the changes are saved, we can split the mapping locally (because it requires @@ -1272,14 +1283,16 @@ function* splitAgglomerateInMapping( .filter(([_segmentId, agglomerateId]) => agglomerateId === comparableSourceAgglomerateId) .map(([segmentId, _agglomerateId]) => segmentId); - const tracingStoreHost = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); + const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); // Ask the server to map the (split) segment ids. This creates a partial mapping // that only contains these ids. 
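// (Aside: "partial mapping" made concrete with assumed ids. If segments 4 and 5 belonged
// to the split agglomerate, the server answers only for exactly those keys:
//
//   const splitSegmentIds = [4, 5];
//   const mappingAfterSplit = { 4: 101, 5: 102 }; // segment id -> agglomerate id after the split
//
// and the saga then merges these entries into the otherwise unchanged local mapping.)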
const mappingAfterSplit = yield* call( getAgglomeratesForSegmentsFromTracingstore, - tracingStoreHost, + tracingStoreUrl, volumeTracingId, splitSegmentIds, + annotationId, ); // Create a new mapping which is equal to the old one with the difference that diff --git a/frontend/javascripts/oxalis/model/sagas/ready_sagas.ts b/frontend/javascripts/oxalis/model/sagas/ready_sagas.ts new file mode 100644 index 00000000000..25898d935b3 --- /dev/null +++ b/frontend/javascripts/oxalis/model/sagas/ready_sagas.ts @@ -0,0 +1,41 @@ +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { take, takeEvery } from "typed-redux-saga"; + +let isWkReady = false; +let isSceneControllerReady = false; + +function setWkReady() { + isWkReady = true; +} + +function setSceneControllerReady() { + isSceneControllerReady = true; +} + +export function setWkReadyToFalse() { + isWkReady = false; +} + +function* listenForWkReady(): Saga { + yield* takeEvery("WK_READY", setWkReady); +} +function* listenForSceneControllerReady(): Saga { + yield* takeEvery("SCENE_CONTROLLER_READY", setSceneControllerReady); +} + +// The following two sagas are useful for other sagas that might be instantiated before or after +// the {WK,SCENE_CONTROLLER}_READY action was dispatched. If the action was dispatched +// before, this saga immediately returns, otherwise it waits +// until the action is dispatched. + +export function* ensureWkReady(): Saga { + if (isWkReady) return; + yield* take("WK_READY"); +} + +export function* ensureSceneControllerReady(): Saga { + if (isSceneControllerReady) return; + yield* take("SCENE_CONTROLLER_READY"); +} + +export default [listenForWkReady, listenForSceneControllerReady]; diff --git a/frontend/javascripts/oxalis/model/sagas/root_saga.ts b/frontend/javascripts/oxalis/model/sagas/root_saga.ts index 8e03f177535..e42faad6d62 100644 --- a/frontend/javascripts/oxalis/model/sagas/root_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/root_saga.ts @@ -17,7 +17,7 @@ import loadHistogramDataSaga from "oxalis/model/sagas/load_histogram_data_saga"; import listenToClipHistogramSaga from "oxalis/model/sagas/clip_histogram_saga"; import MappingSaga from "oxalis/model/sagas/mapping_saga"; import ProofreadSaga from "oxalis/model/sagas/proofread_saga"; -import { listenForWkReady } from "oxalis/model/sagas/wk_ready_saga"; +import ReadySagas, { setWkReadyToFalse } from "oxalis/model/sagas/ready_sagas"; import { warnIfEmailIsUnverified } from "./user_saga"; import type { EscalateErrorAction } from "../actions/actions"; @@ -28,6 +28,7 @@ export default function* rootSaga(): Saga { const task = yield* fork(restartableSaga); yield* take("RESTART_SAGA"); yield* cancel(task); + yield* call(setWkReadyToFalse); } } export function hasRootSagaCrashed() { @@ -46,7 +47,7 @@ function* listenToErrorEscalation() { function* restartableSaga(): Saga { try { yield* all([ - call(listenForWkReady), + ...ReadySagas.map((saga) => call(saga)), call(warnAboutMagRestriction), call(SettingsSaga), ...SkeletontracingSagas.map((saga) => call(saga)), diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index cf87972ffec..984f815934d 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -1,4 +1,4 @@ -import { doWithToken, getNewestVersionForTracing } from "admin/admin_rest_api"; +import { doWithToken, getNewestVersionForAnnotation } from "admin/admin_rest_api"; import Date from "libs/date"; 
 import ErrorHandling from "libs/error_handling";
 import type { RequestOptionsWithData } from "libs/request";
@@ -10,11 +10,8 @@ import _ from "lodash";
 import messages from "messages";
 import { ControlModeEnum } from "oxalis/constants";
 import { getMagInfo } from "oxalis/model/accessors/dataset_accessor";
-import { selectQueue } from "oxalis/model/accessors/save_accessor";
 import { selectTracing } from "oxalis/model/accessors/tracing_accessor";
-import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor";
 import { FlycamActions } from "oxalis/model/actions/flycam_actions";
-import type { SaveQueueType } from "oxalis/model/actions/save_actions";
 import {
   pushSaveQueueTransaction,
   setLastSaveTimestampAction,
@@ -26,7 +23,6 @@ import type { InitializeSkeletonTracingAction } from "oxalis/model/actions/skeletontracing_actions";
 import { SkeletonTracingSaveRelevantActions } from "oxalis/model/actions/skeletontracing_actions";
 import { ViewModeSaveRelevantActions } from "oxalis/model/actions/view_mode_actions";
 import {
-  type InitializeEditableMappingAction,
   type InitializeVolumeTracingAction,
   VolumeTracingSaveRelevantActions,
 } from "oxalis/model/actions/volumetracing_actions";
@@ -42,10 +38,12 @@ import {
   SAVE_RETRY_WAITING_TIME,
 } from "oxalis/model/sagas/save_saga_constants";
 import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga";
-import type { UpdateAction } from "oxalis/model/sagas/update_actions";
-import { updateTdCamera } from "oxalis/model/sagas/update_actions";
+import {
+  updateTdCamera,
+  type UpdateActionWithoutIsolationRequirement,
+} from "oxalis/model/sagas/update_actions";
 import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga";
-import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga";
+import { ensureWkReady } from "oxalis/model/sagas/ready_sagas";
 import { Model } from "oxalis/singletons";
 import type {
   CameraData,
@@ -58,18 +56,18 @@ import { call, delay, fork, put, race, take, takeEvery } from "typed-redux-saga"

 const ONE_YEAR_MS = 365 * 24 * 3600 * 1000;

-export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: string): Saga<never> {
+export function* pushSaveQueueAsync(): Saga<never> {
   yield* call(ensureWkReady);

-  yield* put(setLastSaveTimestampAction(saveQueueType, tracingId));
+  yield* put(setLastSaveTimestampAction());
   let loopCounter = 0;

   while (true) {
     loopCounter++;
     let saveQueue;
     // Check whether the save queue is actually empty, the PUSH_SAVE_QUEUE_TRANSACTION action
-    // could have been triggered during the call to sendRequestToServer
-    saveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId));
+    // could have been triggered during the call to sendSaveRequestToServer
+    saveQueue = yield* select((state) => state.save.queue);

     if (saveQueue.length === 0) {
       if (loopCounter % 100 === 0) {
@@ -86,7 +84,7 @@
       timeout: delay(PUSH_THROTTLE_TIME),
       forcePush: take("SAVE_NOW"),
     });
-    yield* put(setSaveBusyAction(true, saveQueueType, tracingId));
+    yield* put(setSaveBusyAction(true));

     // Send (parts) of the save queue to the server.
     // There are two main cases:
@@ -105,22 +103,22 @@
       // ignored (they will be picked up in the next iteration of this loop).
       // Otherwise, the risk of a high number of save-requests (see case 1)
       // would be present here, too (note the risk would be greater, because the
-      // user didn't use the save button which is usually accompanied a small pause).
+      // user didn't use the save button which is usually accompanied by a small pause).
     const itemCountToSave = forcePush
       ? Number.POSITIVE_INFINITY
-      : yield* select((state) => selectQueue(state, saveQueueType, tracingId).length);
+      : yield* select((state) => state.save.queue.length);
     let savedItemCount = 0;
     while (savedItemCount < itemCountToSave) {
-      saveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId));
+      saveQueue = yield* select((state) => state.save.queue);

       if (saveQueue.length > 0) {
-        savedItemCount += yield* call(sendRequestToServer, saveQueueType, tracingId);
+        savedItemCount += yield* call(sendSaveRequestToServer);
       } else {
         break;
       }
     }

-    yield* put(setSaveBusyAction(false, saveQueueType, tracingId));
+    yield* put(setSaveBusyAction(false));
   }
 }
 export function sendRequestWithToken(
@@ -132,17 +130,14 @@

 // This function returns the first n batches of the provided array, so that the count of
 // all actions in these n batches does not exceed MAXIMUM_ACTION_COUNT_PER_SAVE
-function sliceAppropriateBatchCount(
-  batches: Array<SaveQueueEntry>,
-  saveQueueType: SaveQueueType,
-): Array<SaveQueueEntry> {
+function sliceAppropriateBatchCount(batches: Array<SaveQueueEntry>): Array<SaveQueueEntry> {
   const slicedBatches = [];
   let actionCount = 0;

   for (const batch of batches) {
     const newActionCount = actionCount + batch.actions.length;

-    if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE[saveQueueType]) {
+    if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE) {
       actionCount = newActionCount;
       slicedBatches.push(batch);
     } else {
@@ -162,22 +157,18 @@ function getRetryWaitTime(retryCount: number) {
 // at any time, because the browser page is reloaded after the message is shown, anyway.
 let didShowFailedSimultaneousTracingError = false;

-export function* sendRequestToServer(
-  saveQueueType: SaveQueueType,
-  tracingId: string,
-): Saga<number> {
+export function* sendSaveRequestToServer(): Saga<number> {
  /*
   * Saves a reasonably-sized part of the save queue (that corresponds to the
   * tracingId) to the server (plus retry-mechanism).
   * The saga returns the number of save queue items that were saved.
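   * Worked example (illustrative only): with MAXIMUM_ACTION_COUNT_PER_SAVE = 3000, a
   * queue of three batches holding 2000, 900 and 800 actions is sliced to the first
   * two batches (2900 actions) for one request; the third batch is saved by the next
   * call of this saga.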
   */
-  const fullSaveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId));
-  const saveQueue = sliceAppropriateBatchCount(fullSaveQueue, saveQueueType);
+  const fullSaveQueue = yield* select((state) => state.save.queue);
+  const saveQueue = sliceAppropriateBatchCount(fullSaveQueue);
   let compactedSaveQueue = compactSaveQueue(saveQueue);
-  const { version, type } = yield* select((state) =>
-    selectTracing(state, saveQueueType, tracingId),
-  );
+  const version = yield* select((state) => state.tracing.version);
+  const annotationId = yield* select((state) => state.tracing.annotationId);
   const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url);
   let versionIncrement;
   [compactedSaveQueue, versionIncrement] = addVersionNumbers(compactedSaveQueue, version);
@@ -191,7 +182,8 @@
       const startTime = Date.now();
       yield* call(
         sendRequestWithToken,
-        `${tracingStoreUrl}/tracings/${type}/${tracingId}/update?token=`,
+
+        `${tracingStoreUrl}/tracings/annotation/${annotationId}/update?token=`,
         {
           method: "POST",
           data: compactedSaveQueue,
@@ -212,19 +204,17 @@
         );
       }

-      yield* put(setVersionNumberAction(version + versionIncrement, saveQueueType, tracingId));
-      yield* put(setLastSaveTimestampAction(saveQueueType, tracingId));
-      yield* put(shiftSaveQueueAction(saveQueue.length, saveQueueType, tracingId));
-
-      if (saveQueueType === "volume") {
-        try {
-          yield* call(markBucketsAsNotDirty, compactedSaveQueue, tracingId);
-        } catch (error) {
-          // If markBucketsAsNotDirty fails some reason, wk cannot recover from this error.
-          console.warn("Error when marking buckets as clean. No retry possible. Error:", error);
-          exceptionDuringMarkBucketsAsNotDirty = true;
-          throw error;
-        }
+      yield* put(setVersionNumberAction(version + versionIncrement));
+      yield* put(setLastSaveTimestampAction());
+      yield* put(shiftSaveQueueAction(saveQueue.length));
+
+      try {
+        yield* call(markBucketsAsNotDirty, compactedSaveQueue);
+      } catch (error) {
+        // If markBucketsAsNotDirty fails for some reason, wk cannot recover from this error.
+        console.warn("Error when marking buckets as clean. No retry possible. Error:", error);
+        exceptionDuringMarkBucketsAsNotDirty = true;
+        throw error;
       }

       yield* call(toggleErrorHighlighting, false);
@@ -289,33 +279,32 @@
   }
 }

-function* markBucketsAsNotDirty(saveQueue: Array<SaveQueueEntry>, tracingId: string) {
-  const segmentationLayer = Model.getSegmentationTracingLayer(tracingId);
-  const segmentationMagInfo = yield* call(getMagInfo, segmentationLayer.mags);
-
-  if (segmentationLayer != null) {
-    for (const saveEntry of saveQueue) {
-      for (const updateAction of saveEntry.actions) {
-        if (updateAction.name === "updateBucket") {
-          const { position, mag, additionalCoordinates } = updateAction.value;
-          const magIndex = segmentationMagInfo.getIndexByMag(mag);
-          const zoomedBucketAddress = globalPositionToBucketPosition(
-            position,
-            segmentationMagInfo.getDenseMags(),
-            magIndex,
-            additionalCoordinates,
-          );
-          const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress);
-
-          if (bucket.type === "null") {
-            continue;
-          }
-
-          bucket.dirtyCount--;
-
-          if (bucket.dirtyCount === 0) {
-            bucket.markAsPushed();
-          }
+function* markBucketsAsNotDirty(saveQueue: Array<SaveQueueEntry>) {
+  for (const saveEntry of saveQueue) {
+    for (const updateAction of saveEntry.actions) {
+      if (updateAction.name === "updateBucket") {
+        const { actionTracingId: tracingId } = updateAction.value;
+        const segmentationLayer = Model.getSegmentationTracingLayer(tracingId);
+        const segmentationMagInfo = yield* call(getMagInfo, segmentationLayer.mags);
+
+        const { position, mag, additionalCoordinates } = updateAction.value;
+        const magIndex = segmentationMagInfo.getIndexByMag(mag);
+        const zoomedBucketAddress = globalPositionToBucketPosition(
+          position,
+          segmentationMagInfo.getDenseMags(),
+          magIndex,
+          additionalCoordinates,
+        );
+        const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress);
+
+        if (bucket.type === "null") {
+          continue;
+        }
+
+        bucket.dirtyCount--;
+
+        if (bucket.dirtyCount === 0) {
+          bucket.markAsPushed();
        }
      }
    }
@@ -357,8 +346,8 @@ export function performDiffTracing(
   flycam: Flycam,
   prevTdCamera: CameraData,
   tdCamera: CameraData,
-): Array<UpdateAction> {
-  let actions: Array<UpdateAction> = [];
+): Array<UpdateActionWithoutIsolationRequirement> {
+  let actions: Array<UpdateActionWithoutIsolationRequirement> = [];

   if (prevTracing.type === "skeleton" && tracing.type === "skeleton") {
     actions = actions.concat(
@@ -380,37 +369,28 @@
 }

 export function* saveTracingAsync(): Saga<void> {
+  yield* fork(pushSaveQueueAsync);
   yield* takeEvery("INITIALIZE_SKELETONTRACING", setupSavingForTracingType);
   yield* takeEvery("INITIALIZE_VOLUMETRACING", setupSavingForTracingType);
-  yield* takeEvery("INITIALIZE_EDITABLE_MAPPING", setupSavingForEditableMapping);
 }

-export function* setupSavingForEditableMapping(
-  initializeAction: InitializeEditableMappingAction,
-): Saga<void> {
-  // No diffing needs to be done for editable mappings as the saga pushes update actions
-  // to the respective save queues, itself
-  const volumeTracingId = initializeAction.mapping.tracingId;
-  yield* fork(pushSaveQueueAsync, "mapping", volumeTracingId);
-}
 export function* setupSavingForTracingType(
   initializeAction: InitializeSkeletonTracingAction | InitializeVolumeTracingAction,
 ): Saga<void> {
  /*
    Listen to changes to the annotation and derive UpdateActions from the old and new state.
-   The actual push to the server is done by the forked pushSaveQueueAsync saga.
+   The actual push to the server is done by the forked pushSaveQueueAsync saga.
  */
   const saveQueueType =
     initializeAction.type === "INITIALIZE_SKELETONTRACING" ?
"skeleton" : "volume"; const tracingId = initializeAction.tracing.id; - yield* fork(pushSaveQueueAsync, saveQueueType, tracingId); let prevTracing = (yield* select((state) => selectTracing(state, saveQueueType, tracingId))) as | VolumeTracing | SkeletonTracing; let prevFlycam = yield* select((state) => state.flycam); let prevTdCamera = yield* select((state) => state.viewModeData.plane.tdCamera); - yield* take("WK_READY"); + yield* call(ensureWkReady); while (true) { if (saveQueueType === "skeleton") { @@ -456,7 +436,7 @@ export function* setupSavingForTracingType( ); if (items.length > 0) { - yield* put(pushSaveQueueTransaction(items, saveQueueType, tracingId)); + yield* put(pushSaveQueueTransaction(items)); } prevTracing = tracing; @@ -498,32 +478,29 @@ function* watchForSaveConflicts() { const maybeSkeletonTracing = yield* select((state) => state.tracing.skeleton); const volumeTracings = yield* select((state) => state.tracing.volumes); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); const tracings: Array = _.compact([ ...volumeTracings, maybeSkeletonTracing, ]); - for (const tracing of tracings) { - const versionOnServer = yield* call( - getNewestVersionForTracing, - tracingStoreUrl, - tracing.tracingId, - tracing.type, - ); + if (tracings.length === 0) { + return; + } + + const versionOnServer = yield* call( + getNewestVersionForAnnotation, + tracingStoreUrl, + annotationId, + ); + for (const tracing of tracings) { // Read the tracing version again from the store, since the // old reference to tracing might be outdated now due to the // immutability. const versionOnClient = yield* select((state) => { - if (tracing.type === "volume") { - return getVolumeTracingById(state.tracing, tracing.tracingId).version; - } - const { skeleton } = state.tracing; - if (skeleton == null) { - throw new Error("Skeleton must exist at this point."); - } - return skeleton.version; + return state.tracing.version; }); const toastKey = `save_conflicts_warning_${tracing.tracingId}`; @@ -531,9 +508,7 @@ function* watchForSaveConflicts() { // The latest version on the server is greater than the most-recently // stored version. - const saveQueue = yield* select((state) => - selectQueue(state, tracing.type, tracing.tracingId), - ); + const saveQueue = yield* select((state) => state.save.queue); let msg = ""; if (!allowSave) { @@ -573,6 +548,8 @@ function* watchForSaveConflicts() { return VERSION_POLL_INTERVAL_SINGLE_EDITOR; } + yield* call(ensureWkReady); + while (true) { const interval = yield* call(getPollInterval); yield* call(sleep, interval); diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts index 0fdc776eb2c..dbc7dba6729 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts @@ -11,14 +11,13 @@ export const UNDO_HISTORY_SIZE = 20; export const SETTINGS_RETRY_DELAY = 15 * 1000; export const SETTINGS_MAX_RETRY_COUNT = 20; // 20 * 15s == 5m -export const MAXIMUM_ACTION_COUNT_PER_BATCH = { - skeleton: 5000, - volume: 1000, // Since volume saving is slower, use a lower value here. - mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. 
-} as const; +export const MAXIMUM_ACTION_COUNT_PER_BATCH = 1000; -export const MAXIMUM_ACTION_COUNT_PER_SAVE = { - skeleton: 15000, - volume: 3000, - mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. -} as const; +// See #8274. +// This constant used to be the following: +// export const MAXIMUM_ACTION_COUNT_PER_SAVE = { +// skeleton: 15000, +// volume: 3000, +// mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. +// } as const; +export const MAXIMUM_ACTION_COUNT_PER_SAVE = 3000; diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index 287f71350dd..f532b4d0021 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -14,7 +14,7 @@ import { race, } from "typed-redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import { TreeTypeEnum } from "oxalis/constants"; import { createEdge, @@ -27,7 +27,7 @@ import { updateTreeEdgesVisibility, updateNode, updateSkeletonTracing, - updateUserBoundingBoxes, + updateUserBoundingBoxesInSkeletonTracing, updateTree, updateTreeGroups, } from "oxalis/model/sagas/update_actions"; @@ -57,7 +57,6 @@ import { setPositionAction, setRotationAction, } from "oxalis/model/actions/flycam_actions"; -import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import DiffableMap, { diffDiffableMaps } from "libs/diffable_map"; import EdgeCollection, { diffEdgeCollections } from "oxalis/model/edge_collection"; import ErrorHandling from "libs/error_handling"; @@ -86,6 +85,7 @@ import { } from "oxalis/model/actions/connectome_actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import memoizeOne from "memoize-one"; +import { ensureWkReady } from "./ready_sagas"; function* centerActiveNode(action: Action): Saga { if ("suppressCentering" in action && action.suppressCentering) { @@ -215,18 +215,12 @@ export function* watchTreeNames(): Saga { } } } -export function* checkVersionRestoreParam(): Saga { - const showVersionRestore = yield* call(Utils.hasUrlParam, "showVersionRestore"); - if (showVersionRestore) { - yield* put(setVersionRestoreVisibilityAction(true)); - } -} export function* watchAgglomerateLoading(): Saga { // Buffer actions since they might be dispatched before WK_READY const channel = yield* actionChannel("LOAD_AGGLOMERATE_SKELETON"); yield* take("INITIALIZE_SKELETONTRACING"); - yield* take("WK_READY"); + yield* call(ensureWkReady); yield* takeEvery(channel, loadAgglomerateSkeletonWithId); } export function* watchConnectomeAgglomerateLoading(): Saga { @@ -250,9 +244,7 @@ function* getAgglomerateSkeletonTracing( const annotation = yield* select((state) => state.tracing); const layerInfo = getLayerByName(dataset, layerName); - const editableMapping = annotation.mappings.find( - (mapping) => mapping.mappingName === mappingName, - ); + const editableMapping = annotation.mappings.find((mapping) => mapping.tracingId === mappingName); try { let nmlProtoBuffer; @@ -477,14 +469,14 @@ export function* watchSkeletonTracingAsync(): Saga { yield* throttle(5000, "PUSH_SAVE_QUEUE_TRANSACTION", watchTracingConsistency); yield* fork(watchFailedNodeCreations); yield* 
fork(watchBranchPointDeletion); - yield* fork(checkVersionRestoreParam); } function* diffNodes( + tracingId: string, prevNodes: NodeMap, nodes: NodeMap, treeId: number, -): Generator { +): Generator { if (prevNodes === nodes) return; const { onlyA: deletedNodeIds, @@ -493,12 +485,12 @@ function* diffNodes( } = diffDiffableMaps(prevNodes, nodes); for (const nodeId of deletedNodeIds) { - yield deleteNode(treeId, nodeId); + yield deleteNode(treeId, nodeId, tracingId); } for (const nodeId of addedNodeIds) { const node = nodes.getOrThrow(nodeId); - yield createNode(treeId, node); + yield createNode(treeId, node, tracingId); } for (const nodeId of changedNodeIds) { @@ -506,7 +498,7 @@ function* diffNodes( const prevNode = prevNodes.getOrThrow(nodeId); if (updateNodePredicate(prevNode, node)) { - yield updateNode(treeId, node); + yield updateNode(treeId, node, tracingId); } } } @@ -516,19 +508,20 @@ function updateNodePredicate(prevNode: Node, node: Node): boolean { } function* diffEdges( + tracingId: string, prevEdges: EdgeCollection, edges: EdgeCollection, treeId: number, -): Generator { +): Generator { if (prevEdges === edges) return; const { onlyA: deletedEdges, onlyB: addedEdges } = diffEdgeCollections(prevEdges, edges); for (const edge of deletedEdges) { - yield deleteEdge(treeId, edge.source, edge.target); + yield deleteEdge(treeId, edge.source, edge.target, tracingId); } for (const edge of addedEdges) { - yield createEdge(treeId, edge.source, edge.target); + yield createEdge(treeId, edge.source, edge.target, tracingId); } } @@ -559,9 +552,10 @@ function updateTreePredicate(prevTree: Tree, tree: Tree): boolean { } export function* diffTrees( + tracingId: string, prevTrees: TreeMap, trees: TreeMap, -): Generator { +): Generator { if (prevTrees === trees) return; const { onlyA: deletedTreeIds, @@ -574,16 +568,16 @@ export function* diffTrees( for (const treeId of deletedTreeIds) { const prevTree = prevTrees[treeId]; - yield* diffNodes(prevTree.nodes, new DiffableMap(), treeId); - yield* diffEdges(prevTree.edges, new EdgeCollection(), treeId); - yield deleteTree(treeId); + yield* diffNodes(tracingId, prevTree.nodes, new DiffableMap(), treeId); + yield* diffEdges(tracingId, prevTree.edges, new EdgeCollection(), treeId); + yield deleteTree(treeId, tracingId); } for (const treeId of addedTreeIds) { const tree = trees[treeId]; - yield createTree(tree); - yield* diffNodes(new DiffableMap(), tree.nodes, treeId); - yield* diffEdges(new EdgeCollection(), tree.edges, treeId); + yield createTree(tree, tracingId); + yield* diffNodes(tracingId, new DiffableMap(), tree.nodes, treeId); + yield* diffEdges(tracingId, new EdgeCollection(), tree.edges, treeId); } for (const treeId of bothTreeIds) { @@ -591,25 +585,25 @@ export function* diffTrees( const prevTree: Tree = prevTrees[treeId]; if (tree !== prevTree) { - yield* diffNodes(prevTree.nodes, tree.nodes, treeId); - yield* diffEdges(prevTree.edges, tree.edges, treeId); + yield* diffNodes(tracingId, prevTree.nodes, tree.nodes, treeId); + yield* diffEdges(tracingId, prevTree.edges, tree.edges, treeId); if (updateTreePredicate(prevTree, tree)) { - yield updateTree(tree); + yield updateTree(tree, tracingId); } if (prevTree.isVisible !== tree.isVisible) { - yield updateTreeVisibility(tree); + yield updateTreeVisibility(tree, tracingId); } if (prevTree.edgesAreVisible !== tree.edgesAreVisible) { - yield updateTreeEdgesVisibility(tree); + yield updateTreeEdgesVisibility(tree, tracingId); } } } } -export const cachedDiffTrees = memoizeOne((prevTrees: TreeMap, 
trees: TreeMap) => - Array.from(diffTrees(prevTrees, trees)), +export const cachedDiffTrees = memoizeOne((tracingId: string, prevTrees: TreeMap, trees: TreeMap) => + Array.from(diffTrees(tracingId, prevTrees, trees)), ); export function* diffSkeletonTracing( @@ -617,14 +611,18 @@ export function* diffSkeletonTracing( skeletonTracing: SkeletonTracing, prevFlycam: Flycam, flycam: Flycam, -): Generator { +): Generator { if (prevSkeletonTracing !== skeletonTracing) { - for (const action of cachedDiffTrees(prevSkeletonTracing.trees, skeletonTracing.trees)) { + for (const action of cachedDiffTrees( + skeletonTracing.tracingId, + prevSkeletonTracing.trees, + skeletonTracing.trees, + )) { yield action; } if (prevSkeletonTracing.treeGroups !== skeletonTracing.treeGroups) { - yield updateTreeGroups(skeletonTracing.treeGroups); + yield updateTreeGroups(skeletonTracing.treeGroups, skeletonTracing.tracingId); } } @@ -639,7 +637,10 @@ export function* diffSkeletonTracing( } if (!_.isEqual(prevSkeletonTracing.userBoundingBoxes, skeletonTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxes(skeletonTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInSkeletonTracing( + skeletonTracing.userBoundingBoxes, + skeletonTracing.tracingId, + ); } } export default [ diff --git a/frontend/javascripts/oxalis/model/sagas/task_saga.tsx b/frontend/javascripts/oxalis/model/sagas/task_saga.tsx index 05c86faa522..57d39051852 100644 --- a/frontend/javascripts/oxalis/model/sagas/task_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/task_saga.tsx @@ -26,6 +26,7 @@ import Store, { type RecommendedConfiguration } from "oxalis/store"; import Toast from "libs/toast"; import messages from "messages"; import renderIndependently from "libs/render_independently"; +import { ensureWkReady } from "./ready_sagas"; function* maybeShowNewTaskTypeModal(taskType: APITaskType): Saga { // Users can acquire new tasks directly in the tracing view. Occasionally, @@ -130,7 +131,7 @@ function* maybeActivateMergerMode(taskType: APITaskType): Saga { } export default function* watchTasksAsync(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); const task = yield* select((state) => state.task); const activeUser = yield* select((state) => state.activeUser); const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); @@ -201,7 +202,7 @@ export function* warnAboutMagRestriction(): Saga { } } - yield* take("WK_READY"); + yield* call(ensureWkReady); // Wait before showing the initial warning. Due to initialization lag it may only be visible very briefly, otherwise. 
   yield* delay(5000);
   yield* warnMaybe();
diff --git a/frontend/javascripts/oxalis/model/sagas/undo_saga.ts b/frontend/javascripts/oxalis/model/sagas/undo_saga.ts
index fc88d2a3e1b..89fdeaef6a7 100644
--- a/frontend/javascripts/oxalis/model/sagas/undo_saga.ts
+++ b/frontend/javascripts/oxalis/model/sagas/undo_saga.ts
@@ -48,6 +48,7 @@ import { Model } from "oxalis/singletons";
 import type { SegmentGroup, SegmentMap, SkeletonTracing, UserBoundingBox } from "oxalis/store";
 import type { Task } from "redux-saga";
 import { actionChannel, all, call, delay, fork, join, put, take } from "typed-redux-saga";
+import { ensureWkReady } from "./ready_sagas";

 const UndoRedoRelevantBoundingBoxActions = AllUserBoundingBoxActions.filter(
   (action) => action !== "SET_USER_BOUNDING_BOXES",
 );
@@ -175,7 +176,7 @@ export function* manageUndoStates(): Saga<never> {
     }
   > = {};

-  yield* take("WK_READY");
+  yield* call(ensureWkReady);

   // Initialization of the local state variables from above.
   prevSkeletonTracingOrNull = yield* select((state) => state.tracing.skeleton);
diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts
index d2cf38aeb20..108313abd7e 100644
--- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts
+++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts
@@ -10,7 +10,11 @@ import type {
   NumberLike,
 } from "oxalis/store";
 import { convertUserBoundingBoxesFromFrontendToServer } from "oxalis/model/reducers/reducer_helpers";
-import type { AdditionalCoordinate, MetadataEntryProto } from "types/api_flow_types";
+import type {
+  AdditionalCoordinate,
+  APIMagRestrictions,
+  MetadataEntryProto,
+} from "types/api_flow_types";

 export type NodeWithTreeId = {
   treeId: number;
@@ -34,9 +38,14 @@
 export type CreateSegmentUpdateAction = ReturnType<typeof createSegmentVolumeAction>;
 export type DeleteSegmentUpdateAction = ReturnType<typeof deleteSegmentVolumeAction>;
 export type DeleteSegmentDataUpdateAction = ReturnType<typeof deleteSegmentDataVolumeAction>;
-type UpdateUserBoundingBoxesUpdateAction = ReturnType<typeof updateUserBoundingBoxes>;
+type UpdateUserBoundingBoxesInSkeletonTracingUpdateAction = ReturnType<
+  typeof updateUserBoundingBoxesInSkeletonTracing
+>;
+type UpdateUserBoundingBoxesInVolumeTracingUpdateAction = ReturnType<
+  typeof updateUserBoundingBoxesInVolumeTracing
+>;
 export type UpdateBucketUpdateAction = ReturnType<typeof updateBucket>;
-type UpdateSegmentGroupsUpdateAction = ReturnType<typeof updateSegmentGroups>;
+export type UpdateSegmentGroupsUpdateAction = ReturnType<typeof updateSegmentGroups>;

 type UpdateTreeGroupsUpdateAction = ReturnType<typeof updateTreeGroups>;
@@ -46,10 +55,23 @@
 export type RevertToVersionUpdateAction = ReturnType<typeof revertToVersion>;
 export type RemoveFallbackLayerUpdateAction = ReturnType<typeof removeFallbackLayer>;
 export type UpdateTdCameraUpdateAction = ReturnType<typeof updateTdCamera>;
 export type UpdateMappingNameUpdateAction = ReturnType<typeof updateMappingName>;
+export type AddLayerToAnnotationUpdateAction = ReturnType<typeof addLayerToAnnotation>;
+export type DeleteAnnotationLayerUpdateAction = ReturnType<typeof deleteAnnotationLayer>;
+export type UpdateAnnotationLayerNameUpdateAction = ReturnType<typeof updateAnnotationLayerName>;
+export type UpdateMetadataOfAnnotationUpdateAction = ReturnType<typeof updateMetadataOfAnnotation>;
 export type SplitAgglomerateUpdateAction = ReturnType<typeof splitAgglomerate>;
 export type MergeAgglomerateUpdateAction = ReturnType<typeof mergeAgglomerate>;

+// There are two types of UpdateActions: those that *need* to be in a separate
+// (isolated) transaction group, and those that don't have this requirement.
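+// Illustration (editor's sketch; exact call shapes may differ): revertToVersion(5) must
+// travel alone, e.g. via pushSaveQueueTransactionIsolated(revertToVersion(5)), whereas a
+// series of createNode/updateNode actions may be batched into one pushSaveQueueTransaction(items).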
 export type UpdateAction =
+  | UpdateActionWithoutIsolationRequirement
+  | UpdateActionWithIsolationRequirement;
+
+export type UpdateActionWithIsolationRequirement =
+  | RevertToVersionUpdateAction
+  | AddLayerToAnnotationUpdateAction;
+export type UpdateActionWithoutIsolationRequirement =
   | UpdateTreeUpdateAction
   | DeleteTreeUpdateAction
   | MergeTreeUpdateAction
@@ -61,7 +83,8 @@
   | DeleteEdgeUpdateAction
   | UpdateSkeletonTracingUpdateAction
   | UpdateVolumeTracingUpdateAction
-  | UpdateUserBoundingBoxesUpdateAction
+  | UpdateUserBoundingBoxesInSkeletonTracingUpdateAction
+  | UpdateUserBoundingBoxesInVolumeTracingUpdateAction
   | CreateSegmentUpdateAction
   | UpdateSegmentUpdateAction
   | DeleteSegmentUpdateAction
@@ -70,14 +93,17 @@
   | UpdateTreeVisibilityUpdateAction
   | UpdateTreeEdgesVisibilityUpdateAction
   | UpdateTreeGroupVisibilityUpdateAction
-  | RevertToVersionUpdateAction
   | UpdateSegmentGroupsUpdateAction
   | UpdateTreeGroupsUpdateAction
   | RemoveFallbackLayerUpdateAction
   | UpdateTdCameraUpdateAction
   | UpdateMappingNameUpdateAction
+  | DeleteAnnotationLayerUpdateAction
+  | UpdateAnnotationLayerNameUpdateAction
+  | UpdateMetadataOfAnnotationUpdateAction
   | SplitAgglomerateUpdateAction
   | MergeAgglomerateUpdateAction;
+
 // This update action is only created in the frontend for display purposes
 type CreateTracingUpdateAction = {
   name: "createTracing";
@@ -91,11 +117,13 @@ type ImportVolumeTracingUpdateAction = {
   value: {
     largestSegmentId: number;
   };
-}; // This update action is only created by the backend
-type AddSegmentIndexUpdateAction = {
+};
+// This update action is only created by the backend
+export type AddSegmentIndexUpdateAction = {
   name: "addSegmentIndex";
   value: {
     actionTimestamp: number;
+    actionTracingId: string;
   };
 };
 type AddServerValuesFn<T extends { value: any }> = (arg0: T) => T & {
@@ -107,6 +135,8 @@
 type AsServerAction<T> = ReturnType<AddServerValuesFn<T>>;

+// When the server delivers update actions (e.g., when requesting the version history
+// of an annotation), ServerUpdateActions are sent which include some additional information.
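+// Sketch of the assumed envelope (illustrative, not part of this diff): the frontend
+// sends { name: "updateTdCamera", value: {} }; the version history then returns roughly
+// { name: "updateTdCamera", value: { actionTimestamp: 1712345678901 } }.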
export type ServerUpdateAction = AsServerAction< | UpdateAction // These two actions are never sent by the frontend and, therefore, don't exist in the UpdateAction type @@ -115,10 +145,11 @@ export type ServerUpdateAction = AsServerAction< | CreateTracingUpdateAction >; -export function createTree(tree: Tree) { +export function createTree(tree: Tree, actionTracingId: string) { return { name: "createTree", value: { + actionTracingId, id: tree.treeId, updatedId: undefined, color: tree.color, @@ -134,18 +165,20 @@ export function createTree(tree: Tree) { }, } as const; } -export function deleteTree(treeId: number) { +export function deleteTree(treeId: number, actionTracingId: string) { return { name: "deleteTree", value: { + actionTracingId, id: treeId, }, } as const; } -export function updateTree(tree: Tree) { +export function updateTree(tree: Tree, actionTracingId: string) { return { name: "updateTree", value: { + actionTracingId, id: tree.treeId, updatedId: tree.treeId, color: tree.color, @@ -161,58 +194,78 @@ export function updateTree(tree: Tree) { }, } as const; } -export function updateTreeVisibility(tree: Tree) { +export function updateTreeVisibility(tree: Tree, actionTracingId: string) { const { treeId, isVisible } = tree; return { name: "updateTreeVisibility", value: { + actionTracingId, treeId, isVisible, }, } as const; } -export function updateTreeEdgesVisibility(tree: Tree) { +export function updateTreeEdgesVisibility(tree: Tree, actionTracingId: string) { const { treeId, edgesAreVisible } = tree; return { name: "updateTreeEdgesVisibility", value: { + actionTracingId, treeId, edgesAreVisible, }, } as const; } -export function updateTreeGroupVisibility(groupId: number | null | undefined, isVisible: boolean) { +export function updateTreeGroupVisibility( + groupId: number | null | undefined, + isVisible: boolean, + actionTracingId: string, +) { return { name: "updateTreeGroupVisibility", value: { + actionTracingId, treeGroupId: groupId, isVisible, }, } as const; } -export function mergeTree(sourceTreeId: number, targetTreeId: number) { +export function mergeTree(sourceTreeId: number, targetTreeId: number, actionTracingId: string) { return { name: "mergeTree", value: { + actionTracingId, sourceId: sourceTreeId, targetId: targetTreeId, }, } as const; } -export function createEdge(treeId: number, sourceNodeId: number, targetNodeId: number) { +export function createEdge( + treeId: number, + sourceNodeId: number, + targetNodeId: number, + actionTracingId: string, +) { return { name: "createEdge", value: { + actionTracingId, treeId, source: sourceNodeId, target: targetNodeId, }, } as const; } -export function deleteEdge(treeId: number, sourceNodeId: number, targetNodeId: number) { +export function deleteEdge( + treeId: number, + sourceNodeId: number, + targetNodeId: number, + actionTracingId: string, +) { return { name: "deleteEdge", value: { + actionTracingId, treeId, source: sourceNodeId, target: targetNodeId, @@ -231,11 +284,12 @@ export type UpdateActionNode = Omit & { treeId: number; }; -export function createNode(treeId: number, node: Node) { +export function createNode(treeId: number, node: Node, actionTracingId: string) { const { untransformedPosition, mag, ...restNode } = node; return { name: "createNode", value: { + actionTracingId, ...restNode, position: untransformedPosition, treeId, @@ -243,21 +297,23 @@ export function createNode(treeId: number, node: Node) { } as CreateActionNode, } as const; } -export function updateNode(treeId: number, node: Node) { +export function 
updateNode(treeId: number, node: Node, actionTracingId: string) { const { untransformedPosition, ...restNode } = node; return { name: "updateNode", value: { + actionTracingId, ...restNode, position: untransformedPosition, treeId, } as UpdateActionNode, } as const; } -export function deleteNode(treeId: number, nodeId: number) { +export function deleteNode(treeId: number, nodeId: number, actionTracingId: string) { return { name: "deleteNode", value: { + actionTracingId, treeId, nodeId, }, @@ -265,6 +321,7 @@ export function deleteNode(treeId: number, nodeId: number) { } export function updateSkeletonTracing( tracing: { + tracingId: string; activeNodeId: number | null | undefined; }, editPosition: Vector3, @@ -273,8 +330,9 @@ export function updateSkeletonTracing( zoomLevel: number, ) { return { - name: "updateTracing", + name: "updateSkeletonTracing", value: { + actionTracingId: tracing.tracingId, activeNode: tracing.activeNodeId, editPosition, editPositionAdditionalCoordinates, @@ -287,10 +345,12 @@ export function moveTreeComponent( sourceTreeId: number, targetTreeId: number, nodeIds: Array, + actionTracingId: string, ) { return { name: "moveTreeComponent", value: { + actionTracingId, sourceId: sourceTreeId, targetId: targetTreeId, nodeIds, @@ -305,8 +365,9 @@ export function updateVolumeTracing( zoomLevel: number, ) { return { - name: "updateTracing", + name: "updateVolumeTracing", value: { + actionTracingId: tracing.tracingId, activeSegmentId: tracing.activeCellId, editPosition: position, editPositionAdditionalCoordinates, @@ -316,10 +377,26 @@ export function updateVolumeTracing( }, } as const; } -export function updateUserBoundingBoxes(userBoundingBoxes: Array) { +export function updateUserBoundingBoxesInSkeletonTracing( + userBoundingBoxes: Array, + actionTracingId: string, +) { + return { + name: "updateUserBoundingBoxesInSkeletonTracing", + value: { + actionTracingId, + boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), + }, + } as const; +} +export function updateUserBoundingBoxesInVolumeTracing( + userBoundingBoxes: Array, + actionTracingId: string, +) { return { - name: "updateUserBoundingBoxes", + name: "updateUserBoundingBoxesInVolumeTracing", value: { + actionTracingId, boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), }, } as const; @@ -331,11 +408,13 @@ export function createSegmentVolumeAction( color: Vector3 | null, groupId: number | null | undefined, metadata: MetadataEntryProto[], + actionTracingId: string, creationTime: number | null | undefined = Date.now(), ) { return { name: "createSegment", value: { + actionTracingId, id, anchorPosition, name, @@ -355,11 +434,13 @@ export function updateSegmentVolumeAction( color: Vector3 | null, groupId: number | null | undefined, metadata: Array, + actionTracingId: string, creationTime: number | null | undefined = Date.now(), ) { return { name: "updateSegment", value: { + actionTracingId, id, anchorPosition, additionalCoordinates, @@ -371,44 +452,54 @@ export function updateSegmentVolumeAction( }, } as const; } -export function deleteSegmentVolumeAction(id: number) { +export function deleteSegmentVolumeAction(id: number, actionTracingId: string) { return { name: "deleteSegment", value: { + actionTracingId, id, }, } as const; } -export function deleteSegmentDataVolumeAction(id: number) { +export function deleteSegmentDataVolumeAction(id: number, actionTracingId: string) { return { name: "deleteSegmentData", value: { + actionTracingId, id, }, } as const; } -export function 
updateBucket(bucketInfo: SendBucketInfo, base64Data: string) { +export function updateBucket( + bucketInfo: SendBucketInfo, + base64Data: string, + actionTracingId: string, +) { return { name: "updateBucket", - value: Object.assign({}, bucketInfo, { + value: { + actionTracingId, + ...bucketInfo, base64Data, - }), + }, } as const; } -export function updateSegmentGroups(segmentGroups: Array) { +export function updateSegmentGroups(segmentGroups: Array, actionTracingId: string) { return { name: "updateSegmentGroups", value: { + actionTracingId, segmentGroups, }, } as const; } -export function updateTreeGroups(treeGroups: Array) { +export function updateTreeGroups(treeGroups: Array, actionTracingId: string) { return { name: "updateTreeGroups", value: { + actionTracingId, treeGroups, }, } as const; @@ -421,10 +512,12 @@ export function revertToVersion(version: number) { }, } as const; } -export function removeFallbackLayer() { +export function removeFallbackLayer(actionTracingId: string) { return { name: "removeFallbackLayer", - value: {}, + value: { + actionTracingId, + }, } as const; } export function updateTdCamera() { @@ -445,10 +538,16 @@ export function updateMappingName( mappingName: string | null | undefined, isEditable: boolean | null | undefined, isLocked: boolean | undefined, + actionTracingId: string, ) { return { name: "updateMappingName", - value: { mappingName, isEditable, isLocked }, + value: { + actionTracingId, + mappingName, + isEditable, + isLocked, + }, } as const; } export function splitAgglomerate( @@ -456,9 +555,11 @@ export function splitAgglomerate( segmentId1: NumberLike, segmentId2: NumberLike, mag: Vector3, + actionTracingId: string, ): { name: "splitAgglomerate"; value: { + actionTracingId: string; agglomerateId: number; segmentId1: number | undefined; segmentId2: number | undefined; @@ -473,6 +574,7 @@ export function splitAgglomerate( return { name: "splitAgglomerate", value: { + actionTracingId, // TODO: Proper 64 bit support (#6921) agglomerateId: Number(agglomerateId), segmentId1: Number(segmentId1), @@ -487,9 +589,11 @@ export function mergeAgglomerate( segmentId1: NumberLike, segmentId2: NumberLike, mag: Vector3, + actionTracingId: string, ): { name: "mergeAgglomerate"; value: { + actionTracingId: string; agglomerateId1: number; agglomerateId2: number; segmentId1: number | undefined; @@ -505,6 +609,7 @@ export function mergeAgglomerate( return { name: "mergeAgglomerate", value: { + actionTracingId, // TODO: Proper 64 bit support (#6921) agglomerateId1: Number(agglomerateId1), agglomerateId2: Number(agglomerateId2), @@ -515,6 +620,47 @@ export function mergeAgglomerate( } as const; } +type AnnotationLayerCreationParameters = { + typ: "Skeleton" | "Volume"; + name: string | null | undefined; + autoFallbackLayer?: boolean; + fallbackLayerName?: string | null | undefined; + mappingName?: string | null | undefined; + magRestrictions?: APIMagRestrictions | null | undefined; +}; + +export function addLayerToAnnotation(parameters: AnnotationLayerCreationParameters) { + return { + name: "addLayerToAnnotation", + value: { layerParameters: parameters }, + } as const; +} + +export function deleteAnnotationLayer( + tracingId: string, + layerName: string, + typ: "Skeleton" | "Volume", +) { + return { + name: "deleteLayerFromAnnotation", + value: { tracingId, layerName, typ }, + } as const; +} + +export function updateAnnotationLayerName(tracingId: string, newLayerName: string) { + return { + name: "updateLayerMetadata", + value: { tracingId, layerName: newLayerName }, + } 
as const; +} + +export function updateMetadataOfAnnotation(description: string) { + return { + name: "updateMetadataOfAnnotation", + value: { description }, + } as const; +} + function enforceValidMetadata(metadata: MetadataEntryProto[]): MetadataEntryProto[] { // We do not want to save metadata with duplicate keys. Validation errors // will warn the user in case this exists. However, we allow duplicate keys in the diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index 12e1919ac31..3a20d7ec931 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -87,7 +87,7 @@ import { } from "oxalis/model/sagas/saga_helpers"; import { deleteSegmentDataVolumeAction, - type UpdateAction, + type UpdateActionWithoutIsolationRequirement, updateSegmentGroups, } from "oxalis/model/sagas/update_actions"; import { @@ -95,7 +95,7 @@ import { deleteSegmentVolumeAction, removeFallbackLayer, updateSegmentVolumeAction, - updateUserBoundingBoxes, + updateUserBoundingBoxesInVolumeTracing, updateVolumeTracing, updateMappingName, } from "oxalis/model/sagas/update_actions"; @@ -113,11 +113,12 @@ import maybeInterpolateSegmentationLayer from "./volume/volume_interpolation_sag import messages from "messages"; import { pushSaveQueueTransaction } from "../actions/save_actions"; import type { ActionPattern } from "redux-saga/effects"; +import { ensureWkReady } from "./ready_sagas"; const OVERWRITE_EMPTY_WARNING_KEY = "OVERWRITE-EMPTY-WARNING"; export function* watchVolumeTracingAsync(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); yield* takeEveryUnlessBusy( "INTERPOLATE_SEGMENTATION_LAYER", maybeInterpolateSegmentationLayer, @@ -634,14 +635,15 @@ function updateTracingPredicate( } export const cachedDiffSegmentLists = memoizeOne( - (prevSegments: SegmentMap, newSegments: SegmentMap) => - Array.from(uncachedDiffSegmentLists(prevSegments, newSegments)), + (tracingId: string, prevSegments: SegmentMap, newSegments: SegmentMap) => + Array.from(uncachedDiffSegmentLists(tracingId, prevSegments, newSegments)), ); function* uncachedDiffSegmentLists( + tracingId: string, prevSegments: SegmentMap, newSegments: SegmentMap, -): Generator { +): Generator { const { onlyA: deletedSegmentIds, onlyB: addedSegmentIds, @@ -649,7 +651,7 @@ function* uncachedDiffSegmentLists( } = diffDiffableMaps(prevSegments, newSegments); for (const segmentId of deletedSegmentIds) { - yield deleteSegmentVolumeAction(segmentId); + yield deleteSegmentVolumeAction(segmentId, tracingId); } for (const segmentId of addedSegmentIds) { @@ -661,6 +663,7 @@ function* uncachedDiffSegmentLists( segment.color, segment.groupId, segment.metadata, + tracingId, ); } @@ -677,6 +680,7 @@ function* uncachedDiffSegmentLists( segment.color, segment.groupId, segment.metadata, + tracingId, segment.creationTime, ); } @@ -687,7 +691,7 @@ export function* diffVolumeTracing( volumeTracing: VolumeTracing, prevFlycam: Flycam, flycam: Flycam, -): Generator { +): Generator { if (updateTracingPredicate(prevVolumeTracing, volumeTracing, prevFlycam, flycam)) { yield updateVolumeTracing( volumeTracing, @@ -699,12 +703,16 @@ export function* diffVolumeTracing( } if (!_.isEqual(prevVolumeTracing.userBoundingBoxes, volumeTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxes(volumeTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInVolumeTracing( + volumeTracing.userBoundingBoxes, + 
volumeTracing.tracingId, + ); } if (prevVolumeTracing !== volumeTracing) { if (prevVolumeTracing.segments !== volumeTracing.segments) { for (const action of cachedDiffSegmentLists( + volumeTracing.tracingId, prevVolumeTracing.segments, volumeTracing.segments, )) { @@ -713,11 +721,11 @@ export function* diffVolumeTracing( } if (prevVolumeTracing.segmentGroups !== volumeTracing.segmentGroups) { - yield updateSegmentGroups(volumeTracing.segmentGroups); + yield updateSegmentGroups(volumeTracing.segmentGroups, volumeTracing.tracingId); } if (prevVolumeTracing.fallbackLayer != null && volumeTracing.fallbackLayer == null) { - yield removeFallbackLayer(); + yield removeFallbackLayer(volumeTracing.tracingId); } if ( @@ -730,6 +738,7 @@ export function* diffVolumeTracing( volumeTracing.mappingName || null, volumeTracing.hasEditableMapping || null, volumeTracing.mappingIsLocked, + volumeTracing.tracingId, ); yield action; } @@ -947,11 +956,7 @@ function* handleDeleteSegmentData(): Saga { yield* put(setBusyBlockingInfoAction(true, "Segment is being deleted.")); yield* put( - pushSaveQueueTransaction( - [deleteSegmentDataVolumeAction(action.segmentId)], - "volume", - action.layerName, - ), + pushSaveQueueTransaction([deleteSegmentDataVolumeAction(action.segmentId, action.layerName)]), ); yield* call([Model, Model.ensureSavedState]); diff --git a/frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts b/frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts deleted file mode 100644 index b38ddfc02ac..00000000000 --- a/frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { take, takeEvery } from "typed-redux-saga"; - -let isWkReady = false; - -function setWkReady() { - isWkReady = true; -} -export function* listenForWkReady(): Saga { - yield* takeEvery("WK_READY", setWkReady); -} - -export function* ensureWkReady(): Saga { - // This saga is useful for sagas that might be instantiated before or after - // the WK_READY action was dispatched. If the action was dispatched - // before, this saga immediately returns, otherwise it waits - // until the action is dispatched. 
- if (isWkReady) return; - yield* take("WK_READY"); -} diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 3be1248d892..6a4ee496a58 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -9,8 +9,8 @@ import type { ServerEditableMapping, APICompoundType, APISegmentationLayer, + APITracingStoreAnnotation, } from "types/api_flow_types"; -import type { Versions } from "oxalis/view/version_view"; import { computeDataTexturesSetup, getSupportedTextureSpecs, @@ -35,7 +35,7 @@ import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_ac import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; import { getTracingsForAnnotation, - getAnnotationInformation, + getUnversionedAnnotationInformation, getEmptySandboxAnnotationInformation, getDataset, getSharingTokenFromUrlParameters, @@ -43,6 +43,7 @@ import { getDatasetViewConfiguration, getEditableMappingInfo, getAnnotationCompoundInformation, + getAnnotationProto, } from "admin/admin_rest_api"; import { dispatchMaybeFetchMeshFilesAsync, @@ -104,7 +105,11 @@ import { PricingPlanEnum, isFeatureAllowedByPricingPlan, } from "admin/organization/pricing_plan_utils"; -import { convertServerAdditionalAxesToFrontEnd } from "./model/reducers/reducer_helpers"; +import { + convertServerAdditionalAxesToFrontEnd, + convertServerAnnotationToFrontendAnnotation, +} from "./model/reducers/reducer_helpers"; +import { setVersionNumberAction } from "./model/actions/save_actions"; export const HANDLED_ERROR = "error_was_handled"; type DataLayerCollection = Record; @@ -113,7 +118,7 @@ export async function initialize( initialMaybeCompoundType: APICompoundType | null, initialCommandType: TraceOrViewCommand, initialFetch: boolean, - versions?: Versions, + version?: number | undefined | null, ): Promise< | { dataLayers: DataLayerCollection; @@ -124,14 +129,44 @@ export async function initialize( > { Store.dispatch(setControlModeAction(initialCommandType.type)); let annotation: APIAnnotation | null | undefined; + let annotationProto: APITracingStoreAnnotation | null | undefined; let datasetId: string; if (initialCommandType.type === ControlModeEnum.TRACE) { const { annotationId } = initialCommandType; - annotation = - initialMaybeCompoundType != null - ? await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) - : await getAnnotationInformation(annotationId); + if (initialMaybeCompoundType != null) { + annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); + } else { + let unversionedAnnotation = await getUnversionedAnnotationInformation(annotationId); + annotationProto = await getAnnotationProto( + unversionedAnnotation.tracingStore.url, + unversionedAnnotation.id, + version, + ); + const layersWithStats = annotationProto.annotationLayers.map((protoLayer) => { + return { + tracingId: protoLayer.tracingId, + name: protoLayer.name, + typ: protoLayer.typ, + stats: + // Only when the newest version is requested (version==null), + // the stats are available in unversionedAnnotation. + version == null + ? _.find( + unversionedAnnotation.annotationLayers, + (layer) => layer.tracingId === protoLayer.tracingId, + )?.stats ?? 
{} + : {}, + }; + }); + const completeAnnotation = { + ...unversionedAnnotation, + description: annotationProto.description, + annotationProto: annotationProto.earliestAccessibleVersion, + annotationLayers: layersWithStats, + }; + annotation = completeAnnotation; + } datasetId = annotation.datasetId; if (!annotation.restrictions.allowAccess) { @@ -157,7 +192,7 @@ export async function initialize( const [dataset, initialUserSettings, serverTracings] = await fetchParallel( annotation, datasetId, - versions, + version, ); const serverVolumeTracings = getServerVolumeTracings(serverTracings); const serverVolumeTracingIds = serverVolumeTracings.map((volumeTracing) => volumeTracing.id); @@ -202,8 +237,15 @@ export async function initialize( const editableMappings = await fetchEditableMappings( annotation.tracingStore.url, serverVolumeTracings, + annotation.id, + ); + initializeAnnotation( + annotation, + annotationProto?.version ?? 1, + annotationProto?.earliestAccessibleVersion ?? 0, + serverTracings, + editableMappings, ); - initializeTracing(annotation, serverTracings, editableMappings); } else { // In view only tracings we need to set the view mode too. const { allowedModes } = determineAllowedModes(); @@ -225,22 +267,23 @@ export async function initialize( async function fetchParallel( annotation: APIAnnotation | null | undefined, datasetId: string, - versions?: Versions, + version: number | undefined | null, ): Promise<[APIDataset, UserConfiguration, Array]> { return Promise.all([ getDataset(datasetId, getSharingTokenFromUrlParameters()), getUserConfiguration(), // Fetch the actual tracing from the datastore, if there is an skeletonAnnotation - annotation ? getTracingsForAnnotation(annotation, versions) : [], + annotation ? getTracingsForAnnotation(annotation, version) : [], ]); } async function fetchEditableMappings( tracingStoreUrl: string, serverVolumeTracings: ServerVolumeTracing[], + annotationId: string, ): Promise { const promises = serverVolumeTracings .filter((tracing) => tracing.hasEditableMapping) - .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id)); + .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id, annotationId)); return Promise.all(promises); } @@ -274,8 +317,10 @@ function maybeWarnAboutUnsupportedLayers(layers: Array): void { } } -function initializeTracing( +function initializeAnnotation( _annotation: APIAnnotation, + version: number, + earliestAccessibleVersion: number, serverTracings: Array, editableMappings: Array, ) { @@ -307,7 +352,11 @@ function initializeTracing( }; } - Store.dispatch(initializeAnnotationAction(annotation)); + Store.dispatch( + initializeAnnotationAction( + convertServerAnnotationToFrontendAnnotation(annotation, version, earliestAccessibleVersion), + ), + ); getServerVolumeTracings(serverTracings).map((volumeTracing) => { ErrorHandling.assert( getSegmentationLayers(dataset).length > 0, @@ -321,11 +370,9 @@ function initializeTracing( const skeletonTracing = getNullableSkeletonTracing(serverTracings); if (skeletonTracing != null) { - // To generate a huge amount of dummy trees, use: - // import generateDummyTrees from "./model/helpers/generate_dummy_trees"; - // tracing.trees = generateDummyTrees(1, 200000); Store.dispatch(initializeSkeletonTracingAction(skeletonTracing)); } + Store.dispatch(setVersionNumberAction(version)); } // Initialize 'flight', 'oblique' or 'orthogonal' mode @@ -452,6 +499,7 @@ function initializeDataLayerInstances(gpuFactor: number | null | undefined): { layer, 
textureInformation.textureSize, textureInformation.textureCount, + layer.name, // In case of a volume tracing layer the layer name will equal its tracingId. ); } diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 61762134fd8..19c3cd82110 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -191,10 +191,13 @@ export type AnnotationVisibility = APIAnnotationVisibility; export type RestrictionsAndSettings = Restrictions & Settings; export type Annotation = { readonly annotationId: string; + readonly version: number; + readonly earliestAccessibleVersion: number; readonly restrictions: RestrictionsAndSettings; readonly visibility: AnnotationVisibility; readonly annotationLayers: Array; readonly tags: Array; + readonly stats: TracingStats | null | undefined; readonly description: string; readonly name: string; readonly organization: string; @@ -208,7 +211,6 @@ export type Annotation = { }; type TracingBase = { readonly createdTimestamp: number; - readonly version: number; readonly tracingId: string; readonly boundingBox: BoundingBoxType | null | undefined; readonly userBoundingBoxes: Array; @@ -463,23 +465,10 @@ export type ProgressInfo = { readonly processedActionCount: number; readonly totalActionCount: number; }; -export type IsBusyInfo = { - readonly skeleton: boolean; - readonly volumes: Record; - readonly mappings: Record; -}; export type SaveState = { - readonly isBusyInfo: IsBusyInfo; - readonly queue: { - readonly skeleton: Array; - readonly volumes: Record>; - readonly mappings: Record>; - }; - readonly lastSaveTimestamp: { - readonly skeleton: number; - readonly volumes: Record; - readonly mappings: Record; - }; + readonly isBusy: boolean; + readonly queue: Array; + readonly lastSaveTimestamp: number; readonly progressInfo: ProgressInfo; }; export type Flycam = { diff --git a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx index c7091569432..ee61904aa51 100644 --- a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx @@ -355,7 +355,7 @@ function _DownloadModalView({ tracing.annotationId, tracing.annotationType, hasVolumeFallback, - {}, + undefined, fileFormatToDownload, includeVolumeData, ); diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index 9f3f5f52d9d..ad9e746d336 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -8,7 +8,7 @@ import { addTreesAndGroupsAction } from "oxalis/model/actions/skeletontracing_ac import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { - getAnnotationInformation, + getUnversionedAnnotationInformation, getAnnotationCompoundInformation, getTracingForAnnotationType, } from "admin/admin_rest_api"; @@ -145,7 +145,7 @@ class _MergeModalView extends PureComponent { const { selectedExplorativeAnnotation } = this.state; if (selectedExplorativeAnnotation != null) { - const annotation = await getAnnotationInformation(selectedExplorativeAnnotation); + const annotation = await getUnversionedAnnotationInformation(selectedExplorativeAnnotation); 
this.mergeAnnotationIntoActiveTracing(annotation); } }; diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index e577c802e94..e16722e51f9 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -2,8 +2,7 @@ import { connect } from "react-redux"; import React from "react"; import _ from "lodash"; import Store, { type SaveState } from "oxalis/store"; -import type { OxalisState, IsBusyInfo } from "oxalis/store"; -import { isBusy } from "oxalis/model/accessors/save_accessor"; +import type { OxalisState } from "oxalis/store"; import ButtonComponent from "oxalis/view/components/button_component"; import { Model } from "oxalis/singletons"; import window from "libs/window"; @@ -14,7 +13,6 @@ import { LoadingOutlined, } from "@ant-design/icons"; import ErrorHandling from "libs/error_handling"; -import * as Utils from "libs/utils"; import FastTooltip from "components/fast_tooltip"; import { Tooltip } from "antd"; import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; @@ -25,7 +23,7 @@ type OwnProps = { }; type StateProps = { progressFraction: number | null | undefined; - isBusyInfo: IsBusyInfo; + isBusy: boolean; }; type Props = OwnProps & StateProps; type State = { @@ -101,7 +99,7 @@ class SaveButton extends React.PureComponent { getSaveButtonIcon() { if (this.state.isStateSaved) { return ; - } else if (isBusy(this.props.isBusyInfo)) { + } else if (this.props.isBusy) { return ; } else { return ; @@ -109,7 +107,7 @@ class SaveButton extends React.PureComponent { } shouldShowProgress(): boolean { - return isBusy(this.props.isBusyInfo) && this.props.progressFraction != null; + return this.props.isBusy && this.props.progressFraction != null; } render() { @@ -176,27 +174,17 @@ class SaveButton extends React.PureComponent { function getOldestUnsavedTimestamp(saveQueue: SaveState["queue"]): number | null | undefined { let oldestUnsavedTimestamp; - if (saveQueue.skeleton.length > 0) { - oldestUnsavedTimestamp = saveQueue.skeleton[0].timestamp; - } - - for (const volumeQueue of Utils.values(saveQueue.volumes)) { - if (volumeQueue.length > 0) { - const oldestVolumeTimestamp = volumeQueue[0].timestamp; - oldestUnsavedTimestamp = Math.min( - oldestUnsavedTimestamp != null ? oldestUnsavedTimestamp : Number.POSITIVE_INFINITY, - oldestVolumeTimestamp, - ); - } + if (saveQueue.length > 0) { + oldestUnsavedTimestamp = saveQueue[0].timestamp; } return oldestUnsavedTimestamp; } function mapStateToProps(state: OxalisState): StateProps { - const { progressInfo, isBusyInfo } = state.save; + const { progressInfo, isBusy } = state.save; return { - isBusyInfo, + isBusy, // For a low action count, the progress info would show only for a very short amount of time. // Therefore, the progressFraction is set to null, if the count is low. 
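    // (Illustrative: a save of only a handful of actions finishes almost instantly,
    // so a progress bar would merely flicker.)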
progressFraction: diff --git a/frontend/javascripts/oxalis/view/action_bar_view.tsx b/frontend/javascripts/oxalis/view/action_bar_view.tsx index ef4a68f6a6b..d011c949e5a 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.tsx +++ b/frontend/javascripts/oxalis/view/action_bar_view.tsx @@ -195,7 +195,7 @@ class ActionBarView extends React.PureComponent { fallbackLayerName, maybeMappingName, ); - location.href = `${location.origin}/annotations/${annotation.typ}/${annotation.id}${location.hash}`; + location.href = `${location.origin}/annotations/${annotation.id}${location.hash}`; }; renderStartAIJobButton(disabled: boolean, tooltipTextIfDisabled: string): React.ReactNode { diff --git a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx index 3e27aa02ce0..112e28df557 100644 --- a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx +++ b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx @@ -15,7 +15,7 @@ type Rule = { }; export type EditableTextLabelProp = { value: string; - onChange: (...args: Array) => any; + onChange: (newValue: string) => any; rules?: Rule[]; rows?: number; markdown?: boolean; diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index af01e2885bf..432c960374d 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -21,7 +21,7 @@ import { getSegmentationLayers, } from "oxalis/model/accessors/dataset_accessor"; import { - getAnnotationInformation, + getUnversionedAnnotationInformation, getDataset, getTracingForAnnotationType, runTraining, @@ -34,7 +34,12 @@ import _ from "lodash"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { formatVoxels } from "libs/format_utils"; import * as Utils from "libs/utils"; -import type { APIAnnotation, APIDataset, ServerVolumeTracing } from "types/api_flow_types"; +import { + AnnotationLayerEnum, + type APIAnnotation, + type APIDataset, + type ServerVolumeTracing, +} from "types/api_flow_types"; import type { Vector3, Vector6 } from "oxalis/constants"; import { serverVolumeToClientVolumeTracing } from "oxalis/model/reducers/volumetracing_reducer"; import { convertUserBoundingBoxesFromServerToFrontend } from "oxalis/model/reducers/reducer_helpers"; @@ -551,7 +556,7 @@ function AnnotationsCsvInput({ const newAnnotationsWithDatasets = await Promise.all( newItems.map(async (item) => { - const annotation = await getAnnotationInformation(item.annotationId); + const annotation = await getUnversionedAnnotationInformation(item.annotationId); const dataset = await getDataset(annotation.datasetId); const volumeServerTracings: ServerVolumeTracing[] = await Promise.all( @@ -569,7 +574,7 @@ function AnnotationsCsvInput({ let userBoundingBoxes = volumeTracings[0]?.userBoundingBoxes; if (!userBoundingBoxes) { const skeletonLayer = annotation.annotationLayers.find( - (layer) => layer.typ === "Skeleton", + (layer) => layer.typ === AnnotationLayerEnum.Skeleton, ); if (skeletonLayer) { const skeletonTracing = await getTracingForAnnotationType(annotation, skeletonLayer); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 29fb787852b..b83cc8664c1 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ 
b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -21,7 +21,9 @@ import _ from "lodash"; import classnames from "classnames"; import update from "immutability-helper"; import { + type AnnotationLayerType, APIAnnotationTypeEnum, + AnnotationLayerEnum, type APIDataLayer, type APIDataset, type APISkeletonLayer, @@ -49,8 +51,6 @@ import { findDataPositionForLayer, clearCache, findDataPositionForVolumeTracing, - convertToHybridTracing, - deleteAnnotationLayer, updateDatasetDefaultConfiguration, startComputeSegmentIndexFileJob, } from "admin/admin_rest_api"; @@ -89,7 +89,6 @@ import { userSettings } from "types/schemas/user_settings.schema"; import type { Vector3, ControlMode } from "oxalis/constants"; import Constants, { ControlModeEnum, MappingStatusEnum } from "oxalis/constants"; import EditableTextLabel from "oxalis/view/components/editable_text_label"; -import LinkButton from "components/link_button"; import { Model } from "oxalis/singletons"; import type { VolumeTracing, @@ -114,7 +113,6 @@ import { } from "messages"; import { MaterializeVolumeAnnotationModal } from "oxalis/view/action-bar/starting_job_modals"; import AddVolumeLayerModal, { validateReadableLayerName } from "./modals/add_volume_layer_modal"; -import DownsampleVolumeModal from "./modals/downsample_volume_modal"; import Histogram, { isHistogramSupported } from "./histogram_view"; import MappingSettingsView from "./mapping_settings_view"; import { confirmAsync } from "../../../dashboard/dataset/helper_components"; @@ -131,6 +129,11 @@ import { getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; import defaultState from "oxalis/default_state"; +import { + pushSaveQueueTransaction, + pushSaveQueueTransactionIsolated, +} from "oxalis/model/actions/save_actions"; +import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; type DatasetSettingsProps = { userConfiguration: UserConfiguration; @@ -150,6 +153,8 @@ type DatasetSettingsProps = { onZoomToMag: (layerName: string, arg0: Vector3) => number; onChangeUser: (key: keyof UserConfiguration, value: any) => void; reloadHistogram: (layerName: string) => void; + addSkeletonLayerToAnnotation: () => void; + deleteAnnotationLayer: (tracingId: string, type: AnnotationLayerType, layerName: string) => void; tracing: Tracing; task: Task | null | undefined; onEditAnnotationLayer: (tracingId: string, layerProperties: EditableLayerProperties) => void; @@ -161,9 +166,6 @@ type DatasetSettingsProps = { }; type State = { - // If this is set to not-null, the downsampling modal - // is shown for that VolumeTracing - volumeTracingToDownsample: VolumeTracing | null | undefined; isAddVolumeLayerModalVisible: boolean; preselectedSegmentationLayerName: string | undefined; segmentationLayerWasPreselected: boolean | undefined; @@ -366,7 +368,6 @@ function LayerInfoIconWithTooltip({ class DatasetSettings extends React.PureComponent { onChangeUser: Record) => any>; state: State = { - volumeTracingToDownsample: null, isAddVolumeLayerModalVisible: false, preselectedSegmentationLayerName: undefined, segmentationLayerWasPreselected: false, @@ -453,26 +454,39 @@ class DatasetSettings extends React.PureComponent { ); - getDeleteAnnotationLayerButton = (readableName: string, layer?: APIDataLayer) => ( + getDeleteAnnotationLayerButton = ( + readableName: string, + type: AnnotationLayerType, + tracingId: string, + ) => (
     <i
-      onClick={() => this.deleteAnnotationLayerIfConfirmed(readableName, layer)}
+      onClick={() => this.deleteAnnotationLayerIfConfirmed(readableName, type, tracingId)}
       className="fas fa-trash icon-margin-right"
     />
   );
 
-  getDeleteAnnotationLayerDropdownOption = (readableName: string, layer?: APIDataLayer) => (
-    <div onClick={() => this.deleteAnnotationLayerIfConfirmed(readableName, layer)}>
+  getDeleteAnnotationLayerDropdownOption = (
+    readableName: string,
+    type: AnnotationLayerType,
+    tracingId: string,
+    layer?: APIDataLayer,
+  ) => (
+    <div
+      onClick={() => this.deleteAnnotationLayerIfConfirmed(readableName, type, tracingId, layer)}
+    >
       Delete this annotation layer
     </div>
); deleteAnnotationLayerIfConfirmed = async ( - readableAnnoationLayerName: string, + readableAnnotationLayerName: string, + type: AnnotationLayerType, + tracingId: string, layer?: APIDataLayer, ) => { const fallbackLayerNote = @@ -481,7 +495,7 @@ class DatasetSettings extends React.PureComponent { : ""; const shouldDelete = await confirmAsync({ title: `Deleting an annotation layer makes its content and history inaccessible. ${fallbackLayerNote}This cannot be undone. Are you sure you want to delete this layer?`, - okText: `Yes, delete annotation layer “${readableAnnoationLayerName}”`, + okText: `Yes, delete annotation layer “${readableAnnotationLayerName}”`, cancelText: "Cancel", maskClosable: true, closable: true, @@ -495,12 +509,8 @@ class DatasetSettings extends React.PureComponent { }, }); if (!shouldDelete) return; + this.props.deleteAnnotationLayer(tracingId, type, readableAnnotationLayerName); await Model.ensureSavedState(); - await deleteAnnotationLayer( - this.props.tracing.annotationId, - this.props.tracing.annotationType, - readableAnnoationLayerName, - ); location.reload(); }; @@ -619,6 +629,8 @@ class DatasetSettings extends React.PureComponent { const { intensityRange } = layerSettings; const layer = getLayerByName(dataset, layerName); const isSegmentation = layer.category === "segmentation"; + const layerType = + layer.category === "segmentation" ? AnnotationLayerEnum.Volume : AnnotationLayerEnum.Skeleton; const canBeMadeEditable = isSegmentation && layer.tracingId == null && this.props.controlMode === "TRACE"; const isVolumeTracing = isSegmentation ? layer.tracingId != null : false; @@ -683,7 +695,12 @@ class DatasetSettings extends React.PureComponent { ? { label: (
- {this.getDeleteAnnotationLayerDropdownOption(readableName, layer)} + {this.getDeleteAnnotationLayerDropdownOption( + readableName, + layerType, + layer.tracingId, + layer, + )}
), key: "deleteAnnotationLayer", @@ -1136,21 +1153,12 @@ class DatasetSettings extends React.PureComponent { } return ( - - this.showDownsampleVolumeModal(volumeTracing)}> - Magnification Icon - + + ); }; @@ -1167,7 +1175,7 @@ class DatasetSettings extends React.PureComponent { const readableName = "Skeleton"; const skeletonTracing = enforceSkeletonTracing(tracing); const isOnlyAnnotationLayer = tracing.annotationLayers.length === 1; - const { showSkeletons } = skeletonTracing; + const { showSkeletons, tracingId } = skeletonTracing; const activeNodeRadius = getActiveNode(skeletonTracing)?.radius ?? 0; return ( @@ -1218,7 +1226,13 @@ class DatasetSettings extends React.PureComponent { }} > - {!isOnlyAnnotationLayer ? this.getDeleteAnnotationLayerButton(readableName) : null} + {!isOnlyAnnotationLayer + ? this.getDeleteAnnotationLayerButton( + readableName, + AnnotationLayerEnum.Skeleton, + tracingId, + ) + : null}
{showSkeletons ? ( @@ -1292,18 +1306,6 @@ class DatasetSettings extends React.PureComponent { ); }; - showDownsampleVolumeModal = (volumeTracing: VolumeTracing) => { - this.setState({ - volumeTracingToDownsample: volumeTracing, - }); - }; - - hideDownsampleVolumeModal = () => { - this.setState({ - volumeTracingToDownsample: null, - }); - }; - showAddVolumeLayerModal = () => { this.setState({ isAddVolumeLayerModalVisible: true, @@ -1319,8 +1321,8 @@ class DatasetSettings extends React.PureComponent { }; addSkeletonAnnotationLayer = async () => { + this.props.addSkeletonLayerToAnnotation(); await Model.ensureSavedState(); - await convertToHybridTracing(this.props.tracing.annotationId, null); location.reload(); }; @@ -1543,14 +1545,6 @@ class DatasetSettings extends React.PureComponent { ) : null} - {this.state.volumeTracingToDownsample != null ? ( - - ) : null} - {this.state.layerToMergeWithFallback != null ? ( ) => ({ reloadHistogram(layerName: string) { dispatch(reloadHistogramAction(layerName)); }, + + addSkeletonLayerToAnnotation() { + dispatch( + pushSaveQueueTransactionIsolated( + addLayerToAnnotation({ + typ: "Skeleton", + name: "skeleton", + fallbackLayerName: undefined, + }), + ), + ); + }, + + deleteAnnotationLayer(tracingId: string, type: AnnotationLayerType, layerName: string) { + dispatch(pushSaveQueueTransaction([deleteAnnotationLayer(tracingId, layerName, type)])); + }, }); const connector = connect(mapStateToProps, mapDispatchToProps); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx index fad6a364a53..2e200281a0d 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx @@ -10,7 +10,6 @@ import { RestrictMagnificationSlider, } from "dashboard/advanced_dataset/create_explorative_modal"; import Store, { type Tracing } from "oxalis/store"; -import { addAnnotationLayer } from "admin/admin_rest_api"; import { getSomeMagInfoForDataset, getLayerByName, @@ -24,9 +23,12 @@ import { } from "oxalis/model/accessors/volumetracing_accessor"; import messages from "messages"; import InputComponent from "oxalis/view/components/input_component"; -import { api } from "oxalis/singletons"; +import { api, Model } from "oxalis/singletons"; import Toast from "libs/toast"; import { MappingStatusEnum } from "oxalis/constants"; +import { pushSaveQueueTransactionIsolated } from "oxalis/model/actions/save_actions"; +import { useDispatch } from "react-redux"; +import { addLayerToAnnotation } from "oxalis/model/sagas/update_actions"; export type ValidationResult = { isValid: boolean; message: string }; export function checkForLayerNameDuplication( @@ -114,6 +116,7 @@ export default function AddVolumeLayerModal({ const [selectedSegmentationLayerName, setSelectedSegmentationLayerName] = useState< string | undefined >(preselectedLayerName); + const dispatch = useDispatch(); const allReadableLayerNames = useMemo( () => getAllReadableLayerNames(dataset, tracing), [dataset, tracing], @@ -171,15 +174,20 @@ export default function AddVolumeLayerModal({ const maxMagAllowed = Math.max(...magInfo.getMagByIndexOrThrow(magIndices[1])); if (selectedSegmentationLayerName == null) { - await addAnnotationLayer(tracing.annotationId, tracing.annotationType, { - typ: "Volume", - name: newLayerName, - fallbackLayerName: undefined, - magRestrictions: { - min: 
minMagAllowed, - max: maxMagAllowed, - }, - }); + dispatch( + pushSaveQueueTransactionIsolated( + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName: undefined, + magRestrictions: { + min: minMagAllowed, + max: maxMagAllowed, + }, + }), + ), + ); + await Model.ensureSavedState(); } else { if (selectedSegmentationLayer == null) { throw new Error("Segmentation layer is null"); @@ -198,16 +206,21 @@ export default function AddVolumeLayerModal({ maybeMappingName = mappingInfo.mappingName; } - await addAnnotationLayer(tracing.annotationId, tracing.annotationType, { - typ: "Volume", - name: newLayerName, - fallbackLayerName, - magRestrictions: { - min: minMagAllowed, - max: maxMagAllowed, - }, - mappingName: maybeMappingName, - }); + dispatch( + pushSaveQueueTransactionIsolated( + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName, + magRestrictions: { + min: minMagAllowed, + max: maxMagAllowed, + }, + mappingName: maybeMappingName, + }), + ), + ); + await Model.ensureSavedState(); } await api.tracing.hardReload(); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/downsample_volume_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/downsample_volume_modal.tsx deleted file mode 100644 index 7142b602e27..00000000000 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/downsample_volume_modal.tsx +++ /dev/null @@ -1,78 +0,0 @@ -import { Modal } from "antd"; -import { useState } from "react"; -import { AsyncButton } from "components/async_clickables"; -import type { Vector3 } from "oxalis/constants"; -import type { VolumeTracing } from "oxalis/store"; -import { api } from "oxalis/singletons"; -export default function DownsampleVolumeModal({ - hideDownsampleVolumeModal, - magsToDownsample, - volumeTracing, -}: { - hideDownsampleVolumeModal: () => void; - magsToDownsample: Array; - volumeTracing: VolumeTracing; -}) { - const [isDownsampling, setIsDownsampling] = useState(false); - - const handleTriggerDownsampling = async () => { - setIsDownsampling(true); - await api.tracing.downsampleSegmentation(volumeTracing.tracingId); - setIsDownsampling(false); - }; - - return ( - void) | null' is not assignable to ty... Remove this comment to see the full error message - onCancel={isDownsampling ? null : hideDownsampleVolumeModal} - footer={null} - width={800} - maskClosable={false} - open - > -

-        <p>
-          This annotation does not have volume annotation data in all magnifications. Consequently,
-          annotation data cannot be rendered at all zoom values. By clicking "Downsample",
-          WEBKNOSSOS will use the best magnification of the volume data to create all dependent mags.
-        </p>
-        <p>
-          The following magnifications will be added when clicking "Downsample":{" "}
-          {magsToDownsample.map((mag) => mag.join("-")).join(", ")}.
-        </p>
-        <p>The cause for the missing magnifications can be one of the following:</p>
-        <ul>
-          <li>
-            The annotation was created before WEBKNOSSOS supported multi-magnification volume
-            tracings.
-          </li>
-          <li>An old annotation was uploaded which did not include all magnifications.</li>
-          <li>The annotation was created in a task that was restricted to certain magnifications.</li>
-          <li>The dataset was mutated to have more magnifications.</li>
-        </ul>
-        <p>
-          Note that this action might take a few minutes. Afterwards, the annotation is reloaded.
-          Also, the version history of the volume data will be reset.
-        </p>
-        <div style={{ display: "flex", justifyContent: "center" }}>
-          <AsyncButton onClick={handleTriggerDownsampling}>Downsample</AsyncButton>
-        </div>
- ); -} diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx index 0e5fd08ea85..64b0e40dc81 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx @@ -538,7 +538,11 @@ const CommentTabViewMemo = React.memo( } const updateActions = Array.from( - cachedDiffTrees(prevPops.skeletonTracing.trees, nextProps.skeletonTracing.trees), + cachedDiffTrees( + nextProps.skeletonTracing.tracingId, + prevPops.skeletonTracing.trees, + nextProps.skeletonTracing.trees, + ), ); const relevantUpdateActions = updateActions.filter( (ua) => diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index f9e09e57d8c..e86d6c07f6b 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -17,8 +17,10 @@ import { } from "oxalis/model/accessors/dataset_accessor"; import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; import { - getCombinedStats, - type CombinedTracingStats, + getSkeletonStats, + getStats, + getVolumeStats, + type TracingStats, } from "oxalis/model/accessors/annotation_accessor"; import { setAnnotationNameAction, @@ -35,6 +37,7 @@ import { getOrganization } from "admin/admin_rest_api"; import { MarkdownModal } from "../components/markdown_modal"; import FastTooltip from "components/fast_tooltip"; import messages from "messages"; +import type { EmptyObject } from "types/globals"; type StateProps = { annotation: Tracing; @@ -203,14 +206,18 @@ export function AnnotationStats({ asInfoBlock, withMargin, }: { - stats: CombinedTracingStats; + stats: TracingStats | EmptyObject; asInfoBlock: boolean; withMargin?: boolean | null | undefined; }) { + if (!stats || Object.keys(stats).length === 0) return null; const formatLabel = (str: string) => (asInfoBlock ? str : ""); const useStyleWithMargin = withMargin != null ? withMargin : true; const styleWithLargeMarginBottom = { marginBottom: 14 }; const styleWithSmallMargin = { margin: 2 }; + const skeletonStats = getSkeletonStats(stats); + const volumeStats = getVolumeStats(stats); + const totalSegmentCount = volumeStats.reduce((sum, [_, volume]) => sum + volume.segmentCount, 0); return (
Statistics

}
-          {"treeCount" in stats ? (
+          {skeletonStats && "treeCount" in skeletonStats ? (
            <FastTooltip
              html={`
-                Trees: ${safeNumberToStr(stats.treeCount)}
-                Nodes: ${safeNumberToStr(stats.nodeCount)}
-                Edges: ${safeNumberToStr(stats.edgeCount)}
-                Branchpoints: ${safeNumberToStr(stats.branchPointCount)}
+                Trees: ${safeNumberToStr(skeletonStats.treeCount)}
+                Nodes: ${safeNumberToStr(skeletonStats.nodeCount)}
+                Edges: ${safeNumberToStr(skeletonStats.edgeCount)}
+                Branchpoints: ${safeNumberToStr(skeletonStats.branchPointCount)}
) : null} - {"segmentCount" in stats ? ( + {volumeStats.length > 0 ? ( ) : null} @@ -294,7 +302,7 @@ export class DatasetInfoTabView extends React.PureComponent { getAnnotationStatistics() { if (this.props.isDatasetViewMode) return null; - return ; + return ; } getKeyboardShortcuts() { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx index 0bcfd371d6d..b163279734a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx @@ -40,7 +40,7 @@ type ContextMenuProps = { contextMenuPosition: [number, number] | null | undefined; hideContextMenu: () => void; menu: MenuProps | null | undefined; - className: string; // todop: should be unique? + className: string; }; export function ContextMenuContainer(props: ContextMenuProps) { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx index f373160a65f..2de7e883f40 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx @@ -134,12 +134,12 @@ export async function importTracingFiles(files: Array, createGroupForEachF } }; - const tryParsingFileAsNml = async (file: File) => { + const tryParsingFileAsNml = async (file: File, warnAboutVolumes: boolean = true) => { try { const nmlString = await readFileAsText(file); const { trees, treeGroups, userBoundingBoxes, datasetName, containedVolumes } = await parseNml(nmlString); - if (containedVolumes) { + if (containedVolumes && warnAboutVolumes) { Toast.warning( "The NML file contained volume information which was ignored. 
Please upload the NML into the dashboard to create a new annotation which also contains the volume data.", ); @@ -210,7 +210,7 @@ export async function importTracingFiles(files: Array, createGroupForEachF const nmlBlob = await nmlFileEntry.getData!(new BlobWriter()); const nmlFile = new File([nmlBlob], nmlFileEntry.filename); - const nmlImportActions = await tryParsingFileAsNml(nmlFile); + const nmlImportActions = await tryParsingFileAsNml(nmlFile, false); const dataFileEntry = entries.find((entry: Entry) => Utils.isFileExtensionEqualTo(entry.filename, "zip"), @@ -240,21 +240,14 @@ export async function importTracingFiles(files: Array, createGroupForEachF tracing, oldVolumeTracing, dataFile, + tracing.version, ); - if (oldVolumeTracing) { - Store.dispatch(importVolumeTracingAction()); - Store.dispatch( - setVersionNumberAction( - oldVolumeTracing.version + 1, - "volume", - oldVolumeTracing.tracingId, - ), - ); - Store.dispatch(setLargestSegmentIdAction(newLargestSegmentId)); - await clearCache(dataset, oldVolumeTracing.tracingId); - await api.data.reloadBuckets(oldVolumeTracing.tracingId); - } + Store.dispatch(importVolumeTracingAction()); + Store.dispatch(setVersionNumberAction(tracing.version + 1)); + Store.dispatch(setLargestSegmentIdAction(newLargestSegmentId)); + await clearCache(dataset, oldVolumeTracing.tracingId); + await api.data.reloadBuckets(oldVolumeTracing.tracingId); } await reader.close(); diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index c792f7171d6..6b414d7c234 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -36,14 +36,22 @@ import type { DeleteSegmentUpdateAction, MoveTreeComponentUpdateAction, MergeTreeUpdateAction, + UpdateAnnotationLayerNameUpdateAction, UpdateMappingNameUpdateAction, DeleteSegmentDataUpdateAction, + AddLayerToAnnotationUpdateAction, + DeleteAnnotationLayerUpdateAction, + UpdateMetadataOfAnnotationUpdateAction, + UpdateBucketUpdateAction, + UpdateSegmentGroupsUpdateAction, + AddSegmentIndexUpdateAction, } from "oxalis/model/sagas/update_actions"; import FormattedDate from "components/formatted_date"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import { useSelector } from "react-redux"; -import type { OxalisState } from "oxalis/store"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import { formatUserName, getContributorById } from "oxalis/model/accessors/user_accessor"; +import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; type Description = { description: string; icon: React.ReactNode; @@ -56,7 +64,10 @@ const updateTracingDescription = { // determines the order in which update actions are checked // to describe an update action batch. See also the comment // of the `getDescriptionForBatch` function. 
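The registry below follows a lookup-table pattern: each server update-action name maps to a small builder that turns the first action of a batch into a human-readable entry. The following is a minimal self-contained sketch of that idea, using simplified stand-in types and builder names rather than the actual webknossos ones:

type Description = { description: string };

type ServerUpdateAction =
  | { name: "deleteTree"; value: { id: number } }
  | { name: "revertToVersion"; value: { sourceVersion: number } };

// One builder per action name; the batch size lets a builder pluralize.
const builders: {
  [A in ServerUpdateAction as A["name"]]: (action: A, count: number) => Description;
} = {
  deleteTree: (action, count) => ({
    description:
      count > 1 ? `Deleted ${count} trees.` : `Deleted the tree with id ${action.value.id}.`,
  }),
  revertToVersion: (action) => ({
    description: `Reverted to version ${action.value.sourceVersion}.`,
  }),
};

function describeBatch(actions: ServerUpdateAction[]): Description {
  const first = actions[0];
  // Look up the builder by the action's name, mirroring the
  // "Type constraint violated" guard in getDescriptionForSpecificBatch.
  const build = builders[first.name] as (a: ServerUpdateAction, c: number) => Description;
  return build(first, actions.length);
}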
-const descriptionFns: Record Description> = { +const descriptionFns: Record< + ServerUpdateAction["name"], + (firstAction: any, actionCount: number, tracing: HybridTracing) => Description +> = { importVolumeTracing: (): Description => ({ description: "Imported a volume tracing.", icon: , @@ -65,7 +76,11 @@ const descriptionFns: Record Descr description: "Created the annotation.", icon: , }), - updateUserBoundingBoxes: (): Description => ({ + updateUserBoundingBoxesInSkeletonTracing: (): Description => ({ + description: "Updated a bounding box.", + icon: , + }), + updateUserBoundingBoxesInVolumeTracing: (): Description => ({ description: "Updated a bounding box.", icon: , }), @@ -80,14 +95,32 @@ const descriptionFns: Record Descr : "Deactivated the active mapping.", icon: , }), - splitAgglomerate: (action: SplitAgglomerateUpdateAction): Description => ({ - description: `Split agglomerate ${action.value.agglomerateId} by separating the segments at position ${action.value.segmentPosition1} and ${action.value.segmentPosition2}.`, - icon: , - }), - mergeAgglomerate: (action: MergeAgglomerateUpdateAction): Description => ({ - description: `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments at position ${action.value.segmentPosition1} and ${action.value.segmentPosition2}.`, - icon: , - }), + splitAgglomerate: (action: SplitAgglomerateUpdateAction): Description => { + const segment1Description = + action.value.segmentPosition1 != null + ? `at position ${action.value.segmentPosition1}` + : action.value.segmentId1 ?? "unknown"; + const segment2Description = + action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; + const description = `Split agglomerate ${action.value.agglomerateId} by separating the segments ${segment1Description} and ${segment2Description}.`; + return { + description, + icon: , + }; + }, + mergeAgglomerate: (action: MergeAgglomerateUpdateAction): Description => { + const segment1Description = + action.value.segmentPosition1 != null + ? `at position ${action.value.segmentPosition1}` + : action.value.segmentId1 ?? "unknown"; + const segment2Description = + action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; + const description = `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments ${segment1Description} and ${segment2Description}.`; + return { + description, + icon: , + }; + }, deleteTree: (action: DeleteTreeUpdateAction, count: number): Description => ({ description: count > 1 ? 
`Deleted ${count} trees.` : `Deleted the tree with id ${action.value.id}.`, @@ -118,14 +151,28 @@ const descriptionFns: Record Descr description: `Updated the tree with id ${action.value.id}.`, icon: , }), - updateBucket: (): Description => ({ - description: "Updated the segmentation.", - icon: , - }), - updateSegmentGroups: (): Description => ({ - description: "Updated the segment groups.", - icon: , - }), + updateBucket: ( + firstAction: UpdateBucketUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segmentation of layer ${layerName}.`, + icon: , + }; + }, + updateSegmentGroups: ( + firstAction: UpdateSegmentGroupsUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segment groups of layer ${layerName}.`, + icon: , + }; + }, updateNode: (action: UpdateNodeUpdateAction): Description => ({ description: `Updated the node with id ${action.value.id}.`, icon: , @@ -156,26 +203,61 @@ const descriptionFns: Record Descr description: "Updated the 3D view.", icon: , }), - createSegment: (action: CreateSegmentUpdateAction): Description => ({ - description: `Added the segment with id ${action.value.id} to the segments list.`, - icon: , - }), - updateSegment: (action: UpdateSegmentUpdateAction): Description => ({ - description: `Updated the segment with id ${action.value.id} in the segments list.`, - icon: , - }), - deleteSegment: (action: DeleteSegmentUpdateAction): Description => ({ - description: `Deleted the segment with id ${action.value.id} from the segments list.`, - icon: , - }), - deleteSegmentData: (action: DeleteSegmentDataUpdateAction): Description => ({ - description: `Deleted the data of segment ${action.value.id}. 
All voxels with that id were overwritten with 0.`, - icon: , - }), - addSegmentIndex: (): Description => ({ - description: "Added segment index to enable segment statistics.", - icon: , - }), + createSegment: ( + firstAction: CreateSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Added the segment with id ${firstAction.value.id} to the segments list of layer ${layerName}.`, + icon: , + }; + }, + updateSegment: ( + firstAction: UpdateSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segment with id ${firstAction.value.id} in the segments list of layer ${layerName}.`, + icon: , + }; + }, + deleteSegment: ( + firstAction: DeleteSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Deleted the segment with id ${firstAction.value.id} from the segments list of layer ${layerName}.`, + icon: , + }; + }, + deleteSegmentData: ( + firstAction: DeleteSegmentDataUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Deleted the data of segment ${firstAction.value.id} of layer ${layerName}. All voxels with that id were overwritten with 0.`, + icon: , + }; + }, + addSegmentIndex: ( + firstAction: AddSegmentIndexUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Added segment index to layer ${layerName} to enable segment statistics.`, + icon: , + }; + }, // This should never be shown since currently this update action can only be triggered // by merging or splitting trees which is recognized separately, before this description // is accessed. 
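Several of the builders above resolve a human-readable layer name from a tracing id via maybeGetReadableVolumeTracingName, which is defined further down in this diff. A compact sketch of that lookup, with simplified stand-in types and a hypothetical helper name:

type VolumeTracingLike = { tracingId: string };
type HybridTracingLike = {
  volumes: VolumeTracingLike[];
  annotationLayers: { tracingId: string; name: string }[];
};

// Resolve the readable layer name for a tracing id; fall back to ""
// so description templates stay valid even for deleted layers.
function readableVolumeName(tracing: HybridTracingLike, tracingId: string): string {
  const volume = tracing.volumes.find((v) => v.tracingId === tracingId);
  if (volume == null) return "";
  return tracing.annotationLayers.find((l) => l.tracingId === tracingId)?.name ?? "";
}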
@@ -188,20 +270,47 @@ const descriptionFns: Record Descr description: `Merged the trees with id ${action.value.sourceId} and ${action.value.targetId}.`, icon: , }), - updateTracing: (): Description => updateTracingDescription, -}; + updateSkeletonTracing: (): Description => updateTracingDescription, + updateVolumeTracing: (): Description => updateTracingDescription, + addLayerToAnnotation: (action: AddLayerToAnnotationUpdateAction): Description => ({ + description: `Added the layer ${action.value.layerParameters.name} to the annotation.`, + icon: , + }), + deleteLayerFromAnnotation: (action: DeleteAnnotationLayerUpdateAction): Description => ({ + description: `Deleted the layer with id ${action.value.layerName} (${action.value.tracingId}) from the annotation.`, + icon: , + }), + updateLayerMetadata: (action: UpdateAnnotationLayerNameUpdateAction): Description => ({ + description: `Updated the name of the layer with id ${action.value.tracingId} to ${action.value.layerName}.`, + icon: , + }), + updateMetadataOfAnnotation: (action: UpdateMetadataOfAnnotationUpdateAction): Description => { + return { + description: `Updated the description of the annotation to: ${action.value.description.slice(0, 100) || ""}`, + icon: , + }; + }, +} as const; + +function maybeGetReadableVolumeTracingName(tracing: HybridTracing, tracingId: string): string { + const volumeTracing = tracing.volumes.find((volume) => volume.tracingId === tracingId); + return volumeTracing != null + ? getReadableNameByVolumeTracingId(tracing, volumeTracing.tracingId) + : ""; +} function getDescriptionForSpecificBatch( actions: Array, type: string, + tracing: HybridTracing, ): Description { const firstAction = actions[0]; if (firstAction.name !== type) { throw new Error("Type constraint violated"); } - - return descriptionFns[type](firstAction, actions.length); + const fn = descriptionFns[type]; + return fn(firstAction, actions.length, tracing); } // An update action batch can consist of more than one update action as a single user action @@ -215,7 +324,10 @@ function getDescriptionForSpecificBatch( // "more expressive" update actions first and for more general ones later. // The order is determined by the order in which the update actions are added to the // `descriptionFns` object. -function getDescriptionForBatch(actions: Array): Description { +function getDescriptionForBatch( + actions: Array, + tracing: HybridTracing, +): Description { const groupedUpdateActions = _.groupBy(actions, "name"); const moveTreeComponentUAs = groupedUpdateActions.moveTreeComponent; @@ -265,7 +377,7 @@ function getDescriptionForBatch(actions: Array): Description const updateActions = groupedUpdateActions[key]; if (updateActions != null) { - return getDescriptionForSpecificBatch(updateActions, key); + return getDescriptionForSpecificBatch(updateActions, key, tracing); } } @@ -297,6 +409,7 @@ export default function VersionEntry({ const contributors = useSelector((state: OxalisState) => state.tracing.contributors); const activeUser = useSelector((state: OxalisState) => state.activeUser); const owner = useSelector((state: OxalisState) => state.tracing.owner); + const tracing = useSelector((state: OxalisState) => state.tracing); const liClassName = classNames("version-entry", { "active-version-entry": isActive, @@ -312,7 +425,7 @@ export default function VersionEntry({ {allowUpdate ? 
"Restore" : "Download"} ); - const { description, icon } = getDescriptionForBatch(actions); + const { description, icon } = getDescriptionForBatch(actions, tracing); // In case the actionAuthorId is not set, the action was created before the multi-contributor // support. Default to the owner in that case. diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index be21599922e..c6dba3508cc 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -3,17 +3,16 @@ import { useState, useEffect } from "react"; import _ from "lodash"; import dayjs from "dayjs"; import type { APIUpdateActionBatch } from "types/api_flow_types"; -import type { Versions } from "oxalis/view/version_view"; import { chunkIntoTimeWindows } from "libs/utils"; import { getUpdateActionLog, downloadAnnotation, - getNewestVersionForTracing, + getNewestVersionForAnnotation, + getAnnotationProto, } from "admin/admin_rest_api"; import { handleGenericError } from "libs/error_handling"; import { - pushSaveQueueTransaction, - type SaveQueueType, + pushSaveQueueTransactionIsolated, setVersionNumberAction, } from "oxalis/model/actions/save_actions"; import { @@ -24,21 +23,19 @@ import { import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import { Model } from "oxalis/singletons"; -import type { EditableMapping, OxalisState, SkeletonTracing, VolumeTracing } from "oxalis/store"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import Store from "oxalis/store"; import VersionEntryGroup from "oxalis/view/version_entry_group"; import { api } from "oxalis/singletons"; -import Toast from "libs/toast"; import { useInfiniteQuery, useQueryClient } from "@tanstack/react-query"; import { useEffectOnlyOnce } from "libs/react_hooks"; import { useFetch } from "libs/react_helpers"; import { useSelector } from "react-redux"; +import { getCreationTimestamp } from "oxalis/model/accessors/annotation_accessor"; const ENTRIES_PER_PAGE = 5000; type Props = { - versionedObjectType: SaveQueueType; - tracing: SkeletonTracing | VolumeTracing | EditableMapping; allowUpdate: boolean; }; @@ -49,25 +46,32 @@ type GroupedAndChunkedVersions = Record l.tracingId), + annotationLayers.map((l) => l.tracingId), + ) + ) { + const params = new URLSearchParams(); + params.append("showVersionRestore", "true"); + params.append("version", `${version}`); + location.href = `${location.origin}/annotations/${annotationId}?${params}${location.hash}`; + + return; + } + + await api.tracing.restart(null, annotationId, controlMode, version); Store.dispatch(setAnnotationAllowUpdateAction(false)); const segmentationLayersToReload = []; - if (versions == null) { - // No versions were passed which means that the newest annotation should be - // shown. Therefore, reload all segmentation layers. 
- segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); - } else if (versions.volumes != null) { - // Since volume versions were specified, reload the volumeTracing layers - const versionedSegmentationLayers = Object.keys(versions.volumes).map((volumeTracingId) => - Model.getSegmentationTracingLayer(volumeTracingId), - ); - segmentationLayersToReload.push(...versionedSegmentationLayers); - } + segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); for (const segmentationLayer of segmentationLayersToReload) { segmentationLayer.cube.collectAllBuckets(); @@ -80,50 +84,17 @@ async function handleRestoreVersion( versions: APIUpdateActionBatch[], version: number, ) { - const getNewestVersion = () => _.max(versions.map((batch) => batch.version)) || 0; if (props.allowUpdate) { - Store.dispatch( - setVersionNumberAction( - getNewestVersion(), - props.versionedObjectType, - props.tracing.tracingId, - ), - ); - Store.dispatch( - pushSaveQueueTransaction( - [revertToVersion(version)], - props.versionedObjectType, - props.tracing.tracingId, - ), - ); + const newestVersion = _.max(versions.map((batch) => batch.version)) || 0; + Store.dispatch(setVersionNumberAction(newestVersion)); + Store.dispatch(pushSaveQueueTransactionIsolated(revertToVersion(version))); await Model.ensureSavedState(); Store.dispatch(setVersionRestoreVisibilityAction(false)); Store.dispatch(setAnnotationAllowUpdateAction(true)); } else { const { annotationType, annotationId, volumes } = Store.getState().tracing; const includesVolumeFallbackData = volumes.some((volume) => volume.fallbackLayer != null); - downloadAnnotation(annotationId, annotationType, includesVolumeFallbackData, { - [props.versionedObjectType]: version, - }); - } -} - -function handlePreviewVersion(props: Props, version: number) { - if (props.versionedObjectType === "skeleton") { - return previewVersion({ - skeleton: version, - }); - } else if (props.versionedObjectType === "volume") { - return previewVersion({ - volumes: { - [props.tracing.tracingId]: version, - }, - }); - } else { - Toast.warning( - `Version preview and restoring for ${props.versionedObjectType}s is not supported yet.`, - ); - return Promise.resolve(); + downloadAnnotation(annotationId, annotationType, includesVolumeFallbackData, version); } } @@ -146,10 +117,10 @@ const getGroupedAndChunkedVersions = _.memoize( ); async function getUpdateActionLogPage( - props: Props, + tracing: HybridTracing, tracingStoreUrl: string, - tracingId: string, - versionedObjectType: SaveQueueType, + annotationId: string, + earliestAccessibleVersion: number, newestVersion: number, // 0 is the "newest" page (i.e., the page in which the newest version is) relativePageNumber: number, @@ -163,22 +134,24 @@ async function getUpdateActionLogPage( // For example, the following parameters would be a valid variable set // (assuming ENTRIES_PER_PAGE = 2): // newestVersion = 23 - // relativePageNumber = 1 - // absolutePageNumber = ⌊11.5⌋ - 1 = 10 - // newestVersion = 22 - // oldestVersion = 21 + // relativePageNumber = 1 (0 is the newest, 1 is the second newest) + // absolutePageNumber = ⌊23/2⌋ - 1 = 10 + // newestVersionInPage = 22 + // oldestVersionInPage = 21 // Thus, versions 21 and 22 will be fetched for the second newest page const absolutePageNumber = Math.floor(newestVersion / ENTRIES_PER_PAGE) - relativePageNumber; if (absolutePageNumber < 0) { throw new Error("Negative absolute page number received."); } const newestVersionInPage = (1 + absolutePageNumber) * ENTRIES_PER_PAGE; - 
const oldestVersionInPage = absolutePageNumber * ENTRIES_PER_PAGE + 1; + const oldestVersionInPage = Math.max( + absolutePageNumber * ENTRIES_PER_PAGE + 1, + earliestAccessibleVersion, + ); const updateActionLog = await getUpdateActionLog( tracingStoreUrl, - tracingId, - versionedObjectType, + annotationId, oldestVersionInPage, newestVersionInPage, ); @@ -188,12 +161,15 @@ async function getUpdateActionLogPage( if (oldestVersionInPage === 1) { updateActionLog.push({ version: 0, - value: [serverCreateTracing(props.tracing.createdTimestamp)], + value: [serverCreateTracing(getCreationTimestamp(tracing))], }); } // nextPage will contain older versions - const nextPage = oldestVersionInPage > 1 ? relativePageNumber + 1 : undefined; + const nextPage = + oldestVersionInPage > Math.max(earliestAccessibleVersion, 1) + ? relativePageNumber + 1 + : undefined; // previousPage will contain newer versions const previousPage = newestVersion > newestVersionInPage ? relativePageNumber - 1 : undefined; @@ -201,13 +177,18 @@ async function getUpdateActionLogPage( } function VersionList(props: Props) { - const { tracing } = props; const tracingStoreUrl = useSelector((state: OxalisState) => state.tracing.tracingStore.url); + const annotationId = useSelector((state: OxalisState) => state.tracing.annotationId); const newestVersion = useFetch( - () => getNewestVersionForTracing(tracingStoreUrl, tracing.tracingId, props.versionedObjectType), + async () => { + if (annotationId === "") { + return null; + } + return getNewestVersionForAnnotation(tracingStoreUrl, annotationId); + }, null, - [tracing], + [annotationId], ); if (newestVersion == null) { @@ -222,31 +203,32 @@ function VersionList(props: Props) { } function InnerVersionList(props: Props & { newestVersion: number }) { + const tracing = useSelector((state: OxalisState) => state.tracing); const queryClient = useQueryClient(); // Remember the version with which the version view was opened ( // the active version could change by the actions of the user). // Based on this version, the page numbers are calculated. 
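The worked example in the pagination comment above can be restated as a tiny helper. This sketch only mirrors the arithmetic of getUpdateActionLogPage (getPageBounds is a hypothetical name), with ENTRIES_PER_PAGE = 2 as in the comment:

const ENTRIES_PER_PAGE = 2;

function getPageBounds(
  newestVersion: number,
  relativePageNumber: number,
  earliestAccessibleVersion = 0,
) {
  // Page 0 is the newest page; higher relative numbers are older pages.
  const absolutePageNumber = Math.floor(newestVersion / ENTRIES_PER_PAGE) - relativePageNumber;
  if (absolutePageNumber < 0) throw new Error("Negative absolute page number received.");
  const newestVersionInPage = (1 + absolutePageNumber) * ENTRIES_PER_PAGE;
  const oldestVersionInPage = Math.max(
    absolutePageNumber * ENTRIES_PER_PAGE + 1,
    earliestAccessibleVersion,
  );
  return { oldestVersionInPage, newestVersionInPage };
}

// getPageBounds(23, 1) -> { oldestVersionInPage: 21, newestVersionInPage: 22 },
// matching the worked example in the comment.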
const { newestVersion } = props; - const [initialVersion] = useState(props.tracing.version); + const [initialVersion] = useState(tracing.version); function fetchPaginatedVersions({ pageParam }: { pageParam?: number }) { if (pageParam == null) { pageParam = Math.floor((newestVersion - initialVersion) / ENTRIES_PER_PAGE); } - const { tracingId } = props.tracing; const { url: tracingStoreUrl } = Store.getState().tracing.tracingStore; + const { annotationId, earliestAccessibleVersion } = Store.getState().tracing; return getUpdateActionLogPage( - props, + tracing, tracingStoreUrl, - tracingId, - props.versionedObjectType, + annotationId, + earliestAccessibleVersion, newestVersion, pageParam, ); } - const queryKey = ["versions", props.tracing.tracingId]; + const queryKey = ["versions", tracing.annotationId]; useEffectOnlyOnce(() => { // Remove all previous existent queries so that the content of this view @@ -338,24 +320,28 @@ function InnerVersionList(props: Props & { newestVersion: number }) { batches={batchesOrDateString} allowUpdate={props.allowUpdate} newestVersion={flattenedVersions[0].version} - activeVersion={props.tracing.version} + activeVersion={tracing.version} onRestoreVersion={(version) => handleRestoreVersion(props, flattenedVersions, version) } - onPreviewVersion={(version) => handlePreviewVersion(props, version)} + onPreviewVersion={(version) => previewVersion(version)} key={batchesOrDateString[0].version} /> ) } /> )} - {hasNextPage && ( + {hasNextPage ? (
-        )}
+        ) : tracing.earliestAccessibleVersion > 0 ? (
+          <div>
+            Cannot show versions earlier than {tracing.earliestAccessibleVersion}.
+          </div>
+ ) : null} ); } diff --git a/frontend/javascripts/oxalis/view/version_view.tsx b/frontend/javascripts/oxalis/view/version_view.tsx index 8068ad59be6..e755a6c6f38 100644 --- a/frontend/javascripts/oxalis/view/version_view.tsx +++ b/frontend/javascripts/oxalis/view/version_view.tsx @@ -1,14 +1,14 @@ -import { Button, Alert, Tabs, type TabsProps } from "antd"; +import { Button, Alert } from "antd"; import { CloseOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; +import { connect, useDispatch } from "react-redux"; import * as React from "react"; -import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import type { OxalisState, Tracing } from "oxalis/store"; -import { type TracingType, TracingTypeEnum } from "types/api_flow_types"; import Store from "oxalis/store"; import VersionList, { previewVersion } from "oxalis/view/version_list"; +import { useState } from "react"; +import { useWillUnmount } from "beautiful-react-hooks"; export type Versions = { skeleton?: number | null | undefined; @@ -21,151 +21,84 @@ type OwnProps = { allowUpdate: boolean; }; type Props = StateProps & OwnProps; -type State = { - activeTracingType: TracingType; - initialAllowUpdate: boolean; -}; -class VersionView extends React.Component { - state: State = { - activeTracingType: - this.props.tracing.skeleton != null ? TracingTypeEnum.skeleton : TracingTypeEnum.volume, - // Remember whether the tracing could originally be updated - initialAllowUpdate: this.props.allowUpdate, - }; +const VersionView: React.FC = (props: Props) => { + const [initialAllowUpdate] = useState(props.allowUpdate); + const dispatch = useDispatch(); - componentWillUnmount() { - Store.dispatch(setAnnotationAllowUpdateAction(this.state.initialAllowUpdate)); - } + useWillUnmount(() => { + dispatch(setAnnotationAllowUpdateAction(initialAllowUpdate)); + }); - handleClose = async () => { + const handleClose = async () => { // This will load the newest version of both skeleton and volume tracings await previewVersion(); Store.dispatch(setVersionRestoreVisibilityAction(false)); - Store.dispatch(setAnnotationAllowUpdateAction(this.state.initialAllowUpdate)); - }; - - onChangeTab = (activeKey: string) => { - this.setState({ - activeTracingType: activeKey as TracingType, - }); + Store.dispatch(setAnnotationAllowUpdateAction(initialAllowUpdate)); }; - render() { - const tabs: TabsProps["items"] = []; - - if (this.props.tracing.skeleton != null) - tabs.push({ - label: "Skeleton", - key: "skeleton", - children: ( - - ), - }); - - tabs.push( - ...this.props.tracing.volumes.map((volumeTracing) => ({ - label: getReadableNameByVolumeTracingId(this.props.tracing, volumeTracing.tracingId), - key: volumeTracing.tracingId, - children: ( - - ), - })), - ); - - tabs.push( - ...this.props.tracing.mappings.map((mapping) => ({ - label: `${getReadableNameByVolumeTracingId( - this.props.tracing, - mapping.tracingId, - )} (Editable Mapping)`, - key: `${mapping.tracingId}-${mapping.mappingName}`, - children: ( - - ), - })), - ); - - return ( + return ( +
-        Version History
+        Version History
+ ); +}; function mapStateToProps(state: OxalisState): StateProps { return { diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index 1f3556672f1..a016e394a8f 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -1,4 +1,8 @@ -import { createExplorational, getAnnotationInformation, getShortLink } from "admin/admin_rest_api"; +import { + createExplorational, + getUnversionedAnnotationInformation, + getShortLink, +} from "admin/admin_rest_api"; import AcceptInviteView from "admin/auth/accept_invite_view"; import AuthTokenView from "admin/auth/auth_token_view"; import ChangePasswordView from "admin/auth/change_password_view"; @@ -242,7 +246,9 @@ class ReactRouter extends React.Component { serverAuthenticationCallback = async ({ match }: ContextRouter) => { try { - const annotationInformation = await getAnnotationInformation(match.params.id || ""); + const annotationInformation = await getUnversionedAnnotationInformation( + match.params.id || "", + ); return annotationInformation.visibility === "Public"; } catch (_ex) { // Annotation could not be found diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 61833c1989e..c79912ffa93 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -6,13 +6,10 @@ import { writeTypeCheckingFile, } from "test/e2e-setup"; import type { APIAnnotation } from "types/api_flow_types"; -import { APIAnnotationTypeEnum } from "types/api_flow_types"; +import { AnnotationLayerEnum, APIAnnotationTypeEnum } from "types/api_flow_types"; import { createTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { diffTrees } from "oxalis/model/sagas/skeletontracing_saga"; -import { - getNullableSkeletonTracing, - getSkeletonDescriptor, -} from "oxalis/model/accessors/skeletontracing_accessor"; +import { getNullableSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_accessor"; import { sendRequestWithToken, addVersionNumbers } from "oxalis/model/sagas/save_saga"; import * as UpdateActions from "oxalis/model/sagas/update_actions"; @@ -20,7 +17,10 @@ import * as api from "admin/admin_rest_api"; import generateDummyTrees from "oxalis/model/helpers/generate_dummy_trees"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; +import type { SaveQueueEntry } from "oxalis/store"; + const datasetId = "59e9cfbdba632ac2ab8b23b3"; + process.on("unhandledRejection", (err, promise) => { console.error("Unhandled rejection (promise: ", promise, ", reason: ", err, ")."); }); @@ -30,7 +30,7 @@ test.before("Reset database", async () => { }); test("getAnnotationInformation()", async (t) => { const annotationId = "570ba0092a7c0e980056fe9b"; - const annotation = await api.getAnnotationInformation(annotationId); + const annotation = await api.getUnversionedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); writeTypeCheckingFile(annotation, "annotation", "APIAnnotation"); t.snapshot(annotation); @@ -38,7 +38,7 @@ test("getAnnotationInformation()", async (t) => { test("getAnnotationInformation() for public annotation while logged out", async (t) => { setCurrToken("invalidToken"); const annotationId = "88135c192faeb34c0081c05d"; - const 
annotation = await api.getAnnotationInformation(annotationId); + const annotation = await api.getUnversionedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); t.snapshot(annotation); setCurrToken(tokenUserA); @@ -73,35 +73,30 @@ test.serial("finishAnnotation() and reOpenAnnotation() for explorational", async }); test.serial("editAnnotation()", async (t) => { const annotationId = "68135c192faeb34c0081c05d"; - const originalAnnotation = await api.getAnnotationInformation(annotationId); - const { name, visibility, description } = originalAnnotation; + const originalAnnotation = await api.getUnversionedAnnotationInformation(annotationId); + const { visibility } = originalAnnotation; const newName = "new name"; const newVisibility = "Public"; - const newDescription = "new description"; await api.editAnnotation(annotationId, APIAnnotationTypeEnum.Explorational, { - name: newName, visibility: newVisibility, - description: newDescription, + name: newName, }); - const editedAnnotation = await api.getAnnotationInformation(annotationId); + const editedAnnotation = await api.getUnversionedAnnotationInformation(annotationId); t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); - t.is(editedAnnotation.description, newDescription); t.is(editedAnnotation.id, annotationId); - t.is(editedAnnotation.annotationLayers[0].typ, "Skeleton"); + t.is(editedAnnotation.annotationLayers[0].typ, AnnotationLayerEnum.Skeleton); t.is(editedAnnotation.annotationLayers[0].tracingId, "ae417175-f7bb-4a34-8187-d9c3b50143af"); t.snapshot(replaceVolatileValues(editedAnnotation)); await api.editAnnotation(annotationId, APIAnnotationTypeEnum.Explorational, { - name, visibility, - description, }); }); test.serial("finishAllAnnotations()", async (t) => { const annotationIds = ["78135c192faeb34c0081c05d", "78135c192faeb34c0081c05e"]; await api.finishAllAnnotations(annotationIds); const finishedAnnotations = await Promise.all( - annotationIds.map((id) => api.getAnnotationInformation(id)), + annotationIds.map((id) => api.getUnversionedAnnotationInformation(id)), ); t.is(finishedAnnotations.length, 2); finishedAnnotations.forEach((annotation) => { @@ -115,7 +110,7 @@ test.serial("createExplorational() and finishAnnotation()", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); t.snapshot(replaceVolatileValues(createdExplorational)); await api.finishAnnotation(createdExplorational.id, APIAnnotationTypeEnum.Explorational); - const finishedAnnotation = await api.getAnnotationInformation(createdExplorational.id); + const finishedAnnotation = await api.getUnversionedAnnotationInformation(createdExplorational.id); t.is(finishedAnnotation.state, "Finished"); }); test.serial("getTracingsForAnnotation()", async (t) => { @@ -143,12 +138,9 @@ test.serial("getTracingsForAnnotation() for hybrid", async (t) => { }); }); -// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'queue' implicitly has an 'any' type. 
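The helper being replaced below targeted a per-skeleton route; the new sendUpdateActions posts the whole save queue to a single annotation-level endpoint. A rough sketch of that request shape, using plain fetch instead of the sendRequestWithToken helper the tests use; postUpdateActions and the SaveQueueEntry shape are simplified stand-ins:

type SaveQueueEntry = { version: number; timestamp: number; actions: unknown[] };

// Post a batch of versioned update actions to the unified
// annotation-level update route of the tracingstore.
async function postUpdateActions(
  tracingStoreUrl: string,
  annotationId: string,
  queue: SaveQueueEntry[],
): Promise<Response> {
  return fetch(`${tracingStoreUrl}/tracings/annotation/${annotationId}/update?token=`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(queue),
  });
}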
-async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue) { - const skeletonTracing = getSkeletonDescriptor(explorational); - if (skeletonTracing == null) throw new Error("No skeleton annotation present."); +async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueEntry[]) { return sendRequestWithToken( - `${explorational.tracingStore.url}/tracings/skeleton/${skeletonTracing.tracingId}/update?token=`, + `${explorational.tracingStore.url}/tracings/annotation/${explorational.id}/update?token=`, { method: "POST", data: queue, @@ -159,9 +151,11 @@ async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue) test.serial("Send update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); + const tracingId = createdExplorational.annotationLayers[0].tracingId; const initialSkeleton = { activeNodeId: undefined, userBoundingBoxes: [], + tracingId, }; const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions( @@ -173,13 +167,14 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { ), 0, ); - await sendUpdateActionsForSkeleton(createdExplorational, saveQueue); + await sendUpdateActions(createdExplorational, saveQueue); const tracings = await api.getTracingsForAnnotation(createdExplorational); t.snapshot(replaceVolatileValues(tracings[0])); }); test("Send complex update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); - const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 5)); + const { tracingId } = createdExplorational.annotationLayers[0]; + const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 6)); const treeGroups = [ { groupId: 1, @@ -198,9 +193,8 @@ test("Send complex update actions and compare resulting tracing", async (t) => { ], }, ]; - - const createTreesUpdateActions = Array.from(diffTrees({}, trees)); - const updateTreeGroupsUpdateAction = UpdateActions.updateTreeGroups(treeGroups); + const createTreesUpdateActions = Array.from(diffTrees(tracingId, {}, trees)); + const updateTreeGroupsUpdateAction = UpdateActions.updateTreeGroups(treeGroups, tracingId); const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeGroupsUpdateAction]], @@ -208,7 +202,7 @@ test("Send complex update actions and compare resulting tracing", async (t) => { ), 0, ); - await sendUpdateActionsForSkeleton(createdExplorational, saveQueue); + await sendUpdateActions(createdExplorational, saveQueue); const tracings = await api.getTracingsForAnnotation(createdExplorational); writeTypeCheckingFile(tracings[0], "tracing", "ServerSkeletonTracing"); t.snapshot(replaceVolatileValues(tracings[0])); @@ -216,8 +210,9 @@ test("Send complex update actions and compare resulting tracing", async (t) => { test("Update Metadata for Skeleton Tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); - const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 5)); - const createTreesUpdateActions = Array.from(diffTrees({}, trees)); + const { tracingId } = createdExplorational.annotationLayers[0]; + const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 6)); + const createTreesUpdateActions = Array.from(diffTrees(tracingId, {}, trees)); const metadata = [ { key: "city", @@ -236,13 +231,32 @@ 
test("Update Metadata for Skeleton Tracing", async (t) => { ...trees[1], metadata, }; - const updateTreeAction = UpdateActions.updateTree(trees[1]); + + const updateTreeAction = UpdateActions.updateTree(trees[1], tracingId); const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions([createTreesUpdateActions, [updateTreeAction]], 123456789), 0, ); - await sendUpdateActionsForSkeleton(createdExplorational, saveQueue); + await sendUpdateActions(createdExplorational, saveQueue); const tracings = await api.getTracingsForAnnotation(createdExplorational); t.snapshot(replaceVolatileValues(tracings[0])); }); + +test.serial("Send update actions for updating metadata", async (t) => { + const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); + const newDescription = "new description"; + const [saveQueue] = addVersionNumbers( + createSaveQueueFromUpdateActions( + [[UpdateActions.updateMetadataOfAnnotation(newDescription)]], + 123456789, + ), + 0, + ); + await sendUpdateActions(createdExplorational, saveQueue); + const annotation = await api.getAnnotationProto( + createdExplorational.tracingStore.url, + createdExplorational.id, + ); + t.is(annotation.description, newDescription); +}); diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 637a3efa3af..37e26c68c45 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -1,6 +1,13 @@ -import type { ServerSkeletonTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerSkeletonTracing, + type APIAnnotation, + AnnotationLayerEnum, + type APITracingStoreAnnotation, +} from "types/api_flow_types"; + +const TRACING_ID = "47e37793-d0be-4240-a371-87ce68561a13"; export const tracing: ServerSkeletonTracing = { - typ: "Skeleton", + typ: AnnotationLayerEnum.Skeleton, id: "47e37793-d0be-4240-a371-87ce68561a13", trees: [ { @@ -154,7 +161,6 @@ export const tracing: ServerSkeletonTracing = { }, additionalAxes: [], zoomLevel: 2, - version: 7, }; export const annotation: APIAnnotation = { description: "", @@ -176,9 +182,9 @@ export const annotation: APIAnnotation = { }, annotationLayers: [ { - name: "Skeleton", - tracingId: "47e37793-d0be-4240-a371-87ce68561a13", - typ: "Skeleton", + name: AnnotationLayerEnum.Skeleton, + tracingId: TRACING_ID, + typ: AnnotationLayerEnum.Skeleton, stats: {}, }, ], @@ -210,3 +216,16 @@ export const annotation: APIAnnotation = { othersMayEdit: false, isLockedByOwner: false, }; + +export const annotationProto: APITracingStoreAnnotation = { + description: "skeleton-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: TRACING_ID, + name: "skeleton layer name", + typ: AnnotationLayerEnum.Skeleton, + }, + ], +}; diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index fd198b26f92..a2755eb6507 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -1,5 +1,11 @@ -import type { ServerSkeletonTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerSkeletonTracing, + type APIAnnotation, + AnnotationLayerEnum, + type APITracingStoreAnnotation, +} from "types/api_flow_types"; +const TRACING_ID = 
"e90133de-b2db-4912-8261-8b6f84f7edab"; export const tracing: ServerSkeletonTracing = { typ: "Skeleton", trees: [ @@ -59,7 +65,6 @@ export const tracing: ServerSkeletonTracing = { }, additionalAxes: [], zoomLevel: 2, - version: 0, id: "e90133de-b2db-4912-8261-8b6f84f7edab", }; export const annotation: APIAnnotation = { @@ -69,6 +74,7 @@ export const annotation: APIAnnotation = { id: "5b1fd1cf97000027049c67ee", name: "", description: "", + stats: {}, typ: "Task", task: { id: "5b1fd1cb97000027049c67ec", @@ -118,10 +124,11 @@ export const annotation: APIAnnotation = { allowDownload: true, }, annotationLayers: [ + // does this still exist? { name: "Skeleton", - tracingId: "e90133de-b2db-4912-8261-8b6f84f7edab", - typ: "Skeleton", + tracingId: TRACING_ID, + typ: AnnotationLayerEnum.Skeleton, stats: {}, }, ], @@ -177,3 +184,15 @@ export const annotation: APIAnnotation = { }, ], }; +export const annotationProto: APITracingStoreAnnotation = { + description: "task-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: TRACING_ID, + name: "Skeleton", + typ: AnnotationLayerEnum.Skeleton, + }, + ], +}; diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index edd287b490f..01fcebe67bc 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -1,4 +1,11 @@ -import type { ServerVolumeTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerVolumeTracing, + type APIAnnotation, + AnnotationLayerEnum, + type APITracingStoreAnnotation, +} from "types/api_flow_types"; + +const TRACING_ID = "tracingId-1234"; export const tracing: ServerVolumeTracing = { typ: "Volume", activeSegmentId: 10000, @@ -31,7 +38,6 @@ export const tracing: ServerVolumeTracing = { elementClass: "uint16", id: "segmentation", largestSegmentId: 21890, - version: 0, zoomLevel: 0, mags: [ { @@ -86,8 +92,8 @@ export const annotation: APIAnnotation = { annotationLayers: [ { name: "volume", - tracingId: "tracingId-1234", - typ: "Volume", + tracingId: TRACING_ID, + typ: AnnotationLayerEnum.Volume, stats: {}, }, ], @@ -119,3 +125,15 @@ export const annotation: APIAnnotation = { othersMayEdit: false, isLockedByOwner: false, }; +export const annotationProto: APITracingStoreAnnotation = { + description: "volume-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: TRACING_ID, + name: "volume", + typ: AnnotationLayerEnum.Volume, + }, + ], +}; diff --git a/frontend/javascripts/test/geometries/skeleton.spec.ts b/frontend/javascripts/test/geometries/skeleton.spec.ts index 4a902c8ba2b..63e5a5e45f0 100644 --- a/frontend/javascripts/test/geometries/skeleton.spec.ts +++ b/frontend/javascripts/test/geometries/skeleton.spec.ts @@ -10,6 +10,7 @@ import test from "ava"; import type { Vector3 } from "oxalis/constants"; import type { OxalisState } from "oxalis/store"; import { tracing, annotation } from "../fixtures/skeletontracing_server_objects"; +import { convertServerAnnotationToFrontendAnnotation } from "oxalis/model/reducers/reducer_helpers"; mockRequire("app", { currentUser: { @@ -38,7 +39,9 @@ test.before((t) => { const mag = 0; tracing.trees = []; delete tracing.activeNodeId; - Store.dispatch(initializeAnnotationAction(annotation)); + Store.dispatch( + initializeAnnotationAction(convertServerAnnotationToFrontendAnnotation(annotation, 
0, 0)), + ); Store.dispatch(initializeSkeletonTracingAction(tracing)); // Create 20 trees with 100 nodes each diff --git a/frontend/javascripts/test/helpers/apiHelpers.ts b/frontend/javascripts/test/helpers/apiHelpers.ts index 8de61c806bf..b24e5401d52 100644 --- a/frontend/javascripts/test/helpers/apiHelpers.ts +++ b/frontend/javascripts/test/helpers/apiHelpers.ts @@ -1,4 +1,3 @@ -// @ts-nocheck import { createNanoEvents } from "nanoevents"; import type { ExecutionContext } from "ava"; import _ from "lodash"; @@ -13,16 +12,20 @@ import { setSceneController } from "oxalis/controller/scene_controller_provider" import { tracing as SKELETON_TRACING, annotation as SKELETON_ANNOTATION, + annotationProto as SKELETON_ANNOTATION_PROTO, } from "../fixtures/skeletontracing_server_objects"; import { tracing as TASK_TRACING, annotation as TASK_ANNOTATION, + annotationProto as TASK_ANNOTATION_PROTO, } from "../fixtures/tasktracing_server_objects"; import { tracing as VOLUME_TRACING, annotation as VOLUME_ANNOTATION, + annotationProto as VOLUME_ANNOTATION_PROTO, } from "../fixtures/volumetracing_server_objects"; import DATASET from "../fixtures/dataset_server_object"; +import type { ApiInterface } from "oxalis/api/api_latest"; const Request = { receiveJSON: sinon.stub(), @@ -32,8 +35,8 @@ const Request = { sendJSONReceiveArraybufferWithHeaders: sinon.stub(), always: () => Promise.resolve(), }; -export function createBucketResponseFunction(TypedArrayClass, fillValue, delay = 0) { - return async function getBucketData(_url, payload) { +export function createBucketResponseFunction(TypedArrayClass: any, fillValue: number, delay = 0) { + return async function getBucketData(_url: string, payload: { data: Array }) { const bucketCount = payload.data.length; await sleep(delay); return { @@ -46,6 +49,7 @@ export function createBucketResponseFunction(TypedArrayClass, fillValue, delay = }; } +// @ts-ignore Request.sendJSONReceiveArraybufferWithHeaders = createBucketResponseFunction(Uint8Array, 0); const ErrorHandling = { assertExtendContext: _.noop, @@ -58,6 +62,7 @@ const app = { }; const protoHelpers = { parseProtoTracing: sinon.stub(), + parseProtoAnnotation: sinon.stub(), }; export const TIMESTAMP = 1494695001688; const DateMock = { @@ -125,14 +130,17 @@ const modelData = { skeleton: { tracing: SKELETON_TRACING, annotation: SKELETON_ANNOTATION, + annotationProto: SKELETON_ANNOTATION_PROTO, }, volume: { tracing: VOLUME_TRACING, annotation: VOLUME_ANNOTATION, + annotationProto: VOLUME_ANNOTATION_PROTO, }, task: { tracing: TASK_TRACING, annotation: TASK_ANNOTATION, + annotationProto: TASK_ANNOTATION_PROTO, }, }; @@ -199,6 +207,8 @@ export function __setupOxalis( // each __setupOxalis call would overwrite the current stub to receiveJSON. 
.onCall(counter++) .returns(Promise.resolve(datasetClone)); + + protoHelpers.parseProtoAnnotation.returns(_.cloneDeep(modelData[mode].annotationProto)); protoHelpers.parseProtoTracing.returns(_.cloneDeep(modelData[mode].tracing)); Request.receiveJSON .withArgs("/api/userToken/generate", { @@ -219,11 +229,12 @@ setSceneController({ name: "This is a dummy scene controller so that getSceneController works in the tests.", + // @ts-ignore segmentMeshController: { meshesGroupsPerSegmentId: {} }, }); return Model.fetch( - ANNOTATION_TYPE, + null, // no compound annotation { annotationId: ANNOTATION_ID, type: ControlModeEnum.TRACE, @@ -233,11 +244,11 @@ .then(() => { // Trigger the event ourselves, as the OxalisController is not instantiated app.vent.emit("webknossos:ready"); - webknossos.apiReady(apiVersion).then((apiObject) => { + webknossos.apiReady(apiVersion).then((apiObject: ApiInterface) => { t.context.api = apiObject; }); }) - .catch((error) => { + .catch((error: { message: string }) => { console.error("model.fetch() failed", error); t.fail(error.message); }); diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 53ba1f35865..d91548d9195 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -1,10 +1,10 @@ import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import type { SaveQueueEntry } from "oxalis/store"; import dummyUser from "test/fixtures/dummy_user"; export function createSaveQueueFromUpdateActions( - updateActions: UpdateAction[][], + updateActions: UpdateActionWithoutIsolationRequirement[][], timestamp: number, stats: TracingStats | null = null, ): SaveQueueEntry[] { @@ -12,7 +12,7 @@ export function createSaveQueueFromUpdateActions( version: -1, timestamp, stats, - actions: ua.slice(), + actions: ua, info: "[]", transactionGroupCount: 1, authorId: dummyUser.id, @@ -20,9 +20,15 @@ transactionId: "dummyRequestId", })); } -export function withoutUpdateTracing(items: Array<UpdateAction>): Array<UpdateAction> { - return items.filter((item) => item.name !== "updateTracing"); +export function withoutUpdateTracing( + items: Array<UpdateActionWithoutIsolationRequirement>, +): Array<UpdateActionWithoutIsolationRequirement> { + return items.filter( + (item) => item.name !== "updateSkeletonTracing" && item.name !== "updateVolumeTracing", + ); } -export function withoutUpdateTree(items: Array<UpdateAction>): Array<UpdateAction> { +export function withoutUpdateTree( + items: Array<UpdateActionWithoutIsolationRequirement>, +): Array<UpdateActionWithoutIsolationRequirement> { return items.filter((item) => item.name !== "updateTree"); } diff --git a/frontend/javascripts/test/libs/nml.spec.ts b/frontend/javascripts/test/libs/nml.spec.ts index e73937386f7..0b9a14f398c 100644 --- a/frontend/javascripts/test/libs/nml.spec.ts +++ b/frontend/javascripts/test/libs/nml.spec.ts @@ -45,7 +45,6 @@ const initialSkeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, tracingId: "tracingId", - version: 0, cachedMaxNodeId: 7, trees: { "1": { diff --git a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts index 509469042b2..a00c2ce1dea 100644 --- a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts +++
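Two renamings in `saveHelpers.ts` run through all of the following specs: the helper types narrow from `UpdateAction` to `UpdateActionWithoutIsolationRequirement` (presumably the subset of actions that may share a transaction with others), and the old catch-all `updateTracing` action is split into `updateSkeletonTracing` and `updateVolumeTracing`. For reference, a condensed, self-contained sketch of `createSaveQueueFromUpdateActions` (reduced to the fields visible in the hunk; the real `SaveQueueEntry` in `oxalis/store` has a few more):

```ts
type UpdateActionWithoutIsolationRequirement = { name: string; value: unknown };

// Reduced SaveQueueEntry for this sketch.
type SaveQueueEntry = {
  version: number;
  timestamp: number;
  stats: unknown;
  actions: UpdateActionWithoutIsolationRequirement[];
  info: string;
  transactionGroupCount: number;
  authorId: string;
  transactionId: string;
};

function createSaveQueueFromUpdateActions(
  updateActions: UpdateActionWithoutIsolationRequirement[][],
  timestamp: number,
  stats: unknown = null,
): SaveQueueEntry[] {
  return updateActions.map((actions) => ({
    version: -1, // real versions are stamped on later, see addVersionNumbers
    timestamp,
    stats,
    actions,
    info: "[]",
    transactionGroupCount: 1,
    authorId: "dummy-user-id", // dummyUser.id in the real helper
    transactionId: "dummyRequestId",
  }));
}
```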
b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts @@ -8,6 +8,7 @@ import sinon from "sinon"; import test from "ava"; import { MagInfo } from "oxalis/model/helpers/mag_info"; import type { APIDataLayer } from "types/api_flow_types"; +import type { PushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; const RequestMock = { always: (promise: Promise<any>, func: (v: any) => any) => promise.then(func, func), @@ -21,8 +22,10 @@ function setFourBit(bool: boolean) { _fourBit = bool; } +const tracingId = "tracingId"; const mockedCube = { isSegmentation: true, + layerName: tracingId, magInfo: new MagInfo([ [1, 1, 1], [2, 2, 2], @@ -232,6 +235,7 @@ test.serial( setFourBit(false); }, ); + test.serial("sendToStore: Request Handling should send the correct request parameters", (t) => { const data = new Uint8Array(2); const bucket1 = new DataBucket("uint8", [0, 0, 0, 0], null, mockedCube); @@ -243,13 +247,13 @@ test.serial("sendToStore: Request Handling should send the correct request param const batch = [bucket1, bucket2]; const getBucketData = sinon.stub(); getBucketData.returns(data); - const tracingId = "tracingId"; - const expectedSaveQueueItems = { + const expectedSaveQueueItems: PushSaveQueueTransaction = { type: "PUSH_SAVE_QUEUE_TRANSACTION", items: [ { name: "updateBucket", value: { + actionTracingId: tracingId, position: [0, 0, 0], additionalCoordinates: undefined, mag: [1, 1, 1], @@ -260,6 +264,7 @@ test.serial("sendToStore: Request Handling should send the correct request param { name: "updateBucket", value: { + actionTracingId: tracingId, position: [64, 64, 64], additionalCoordinates: undefined, mag: [2, 2, 2], @@ -269,8 +274,6 @@ test.serial("sendToStore: Request Handling should send the correct request param }, ], transactionId: "dummyRequestId", - saveQueueType: "volume", - tracingId, }; const pushQueue = new PushQueue({ ...mockedCube, layerName: tracingId }); diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 0a4b398f274..907fee27b34 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -2,10 +2,10 @@ import mockRequire from "mock-require"; import test from "ava"; import "test/reducers/save_reducer.mock"; import dummyUser from "test/fixtures/dummy_user"; -import type { SaveState } from "oxalis/store"; -import type { APIUser } from "types/api_flow_types"; +import type { OxalisState } from "oxalis/store"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -import type { EmptyObject } from "types/globals"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; + const TIMESTAMP = 1494695001688; const DateMock = { now: () => TIMESTAMP, @@ -14,98 +14,87 @@ const AccessorMock = { getStats: () => null, }; mockRequire("libs/date", DateMock); -mockRequire("oxalis/model/accessors/skeletontracing_accessor", AccessorMock); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer").default; -const { createEdge } = mockRequire.reRequire("oxalis/model/sagas/update_actions"); +mockRequire("oxalis/model/accessors/annotation_accessor", AccessorMock); + +const SaveActions = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const SaveReducer =
mockRequire.reRequire("oxalis/model/reducers/save_reducer") + .default as typeof import("oxalis/model/reducers/save_reducer")["default"]; +const { createEdge } = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); -const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject } = { +const tracingId = "1234567890"; +const initialState = { activeUser: dummyUser, save: { - isBusyInfo: { - skeleton: false, - volumes: {}, - mappings: {}, - }, - queue: { - skeleton: [], - volumes: {}, - mappings: {}, - }, - lastSaveTimestamp: { - skeleton: 0, - volumes: {}, - mappings: {}, - }, + isBusy: false, + queue: [], + lastSaveTimestamp: 0, progressInfo: { processedActionCount: 0, totalActionCount: 0, }, }, - tracing: {}, -}; +} as any as OxalisState; test("Save should add update actions to the queue", (t) => { - const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; + const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); const newState = SaveReducer(initialState, pushAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add more update actions to the queue", (t) => { - const getItems = (treeId: number) => [createEdge(treeId, 1, 2), createEdge(treeId, 2, 3)]; + const getItems = (treeId: number) => [ + createEdge(treeId, 1, 2, tracingId), + createEdge(treeId, 2, 3, tracingId), + ]; const saveQueue = createSaveQueueFromUpdateActions([getItems(0), getItems(1)], TIMESTAMP); - const testState = SaveReducer( - initialState, - SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton"), - ); - const newState = SaveReducer( - testState, - SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton"), - ); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + const testState = SaveReducer(initialState, SaveActions.pushSaveQueueTransaction(getItems(0))); + const newState = SaveReducer(testState, SaveActions.pushSaveQueueTransaction(getItems(1))); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { - // @ts-expect-error ts-migrate(7034) FIXME: Variable 'items' implicitly has type 'any[]' in so... Remove this comment to see the full error message - const items = []; - // @ts-expect-error ts-migrate(7005) FIXME: Variable 'items' implicitly has an 'any[]' type. 
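The rewritten `initialState` above gives the clearest picture of the state change: the save slice no longer keys its bookkeeping by tracing kind. Before and after, expressed as types (a sketch derived from the two fixture states):

```ts
// Before: per-kind bookkeeping, with volumes/mappings further keyed by
// tracing id.
type SaveStateBefore = {
  isBusyInfo: { skeleton: boolean; volumes: Record<string, boolean>; mappings: Record<string, boolean> };
  queue: { skeleton: unknown[]; volumes: Record<string, unknown[]>; mappings: Record<string, unknown[]> };
  lastSaveTimestamp: { skeleton: number; volumes: Record<string, number>; mappings: Record<string, number> };
};

// After: one linear queue for the whole annotation; each update action
// identifies its target layer itself via actionTracingId.
type SaveStateAfter = {
  isBusy: boolean;
  queue: unknown[]; // SaveQueueEntry[]
  lastSaveTimestamp: number;
  progressInfo: { processedActionCount: number; totalActionCount: number };
};
```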
- const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const items: UpdateActionWithoutIsolationRequirement[] = []; + const pushAction = SaveActions.pushSaveQueueTransaction(items); const newState = SaveReducer(initialState, pushAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); test("Save should remove one update actions from the queue", (t) => { - const firstItem = [createEdge(0, 1, 2)]; - const secondItem = [createEdge(1, 2, 3)]; + const firstItem = [createEdge(0, 1, 2, tracingId)]; + const secondItem = [createEdge(1, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP); - const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton"); - const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(1, "skeleton"); + const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem); + const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem); + const popAction = SaveActions.shiftSaveQueueAction(1); let newState = SaveReducer(initialState, firstPushAction); newState = SaveReducer(newState, secondPushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove zero update actions from the queue", (t) => { - const items = [createEdge(0, 1, 2), createEdge(1, 2, 3)]; + const items = [createEdge(0, 1, 2, tracingId), createEdge(1, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(0, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); + const popAction = SaveActions.shiftSaveQueueAction(0); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove all update actions from the queue (1/2)", (t) => { - const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(2, "skeleton"); + const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; + const pushAction = SaveActions.pushSaveQueueTransaction(items); + const popAction = SaveActions.shiftSaveQueueAction(2); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); test("Save should remove all update actions from the queue (2/2)", (t) => { - const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(5, "skeleton"); + const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; + const pushAction = SaveActions.pushSaveQueueTransaction(items); + const popAction = SaveActions.shiftSaveQueueAction(5); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); diff --git 
a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts index caaac52ef2b..2ed50f7a8d7 100644 --- a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts @@ -50,7 +50,6 @@ const initialSkeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, tracingId: "tracingId", - version: 0, trees: {}, treeGroups: [], activeGroupId: null, diff --git a/frontend/javascripts/test/sagas/annotation_saga.spec.ts b/frontend/javascripts/test/sagas/annotation_saga.spec.ts index 1e5cc81772d..499b69bf819 100644 --- a/frontend/javascripts/test/sagas/annotation_saga.spec.ts +++ b/frontend/javascripts/test/sagas/annotation_saga.spec.ts @@ -3,7 +3,7 @@ import _ from "lodash"; import mockRequire from "mock-require"; import type { OxalisState } from "oxalis/store"; import { createMockTask } from "@redux-saga/testing-utils"; -import { take, put } from "redux-saga/effects"; +import { put, call } from "redux-saga/effects"; import dummyUser from "test/fixtures/dummy_user"; import defaultState from "oxalis/default_state"; import { expectValueDeepEqual } from "test/helpers/sagaHelpers"; @@ -12,6 +12,7 @@ import { setBlockedByUserAction, setOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; +import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; const createInitialState = (othersMayEdit: boolean, allowUpdate: boolean = true): OxalisState => ({ ...defaultState, @@ -62,7 +63,7 @@ function prepareTryAcquireMutexSaga(t: ExecutionContext, othersMayEdit: boolean) const listenForOthersMayEditMocked = createMockTask(); const storeState = createInitialState(othersMayEdit); const saga = acquireAnnotationMutexMaybe(); - expectValueDeepEqual(t, saga.next(), take("WK_READY")); + expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); t.deepEqual( saga.next(wkReadyAction()).value.type, "SELECT", diff --git a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts index ab4b7c12d28..f4f17661d20 100644 --- a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts +++ b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts @@ -54,7 +54,7 @@ const treeGroups: TreeGroup[] = [ }, ]; const flycamMock = {} as any as Flycam; - +const tracingId = "someTracingId"; const createState = (trees: Tree[], _treeGroups: TreeGroup[]): OxalisState => ({ ...defaultState, tracing: { @@ -62,8 +62,7 @@ const createState = (trees: Tree[], _treeGroups: TreeGroup[]): OxalisState => ({ skeleton: { additionalAxes: [], createdTimestamp: 0, - version: 0, - tracingId: "tracingId", + tracingId, boundingBox: null, userBoundingBoxes: [], type: "skeleton", @@ -119,7 +118,7 @@ function _updateTreeVisibility(treeId: number, isVisible: boolean) { treeId, isVisible, } as any as Tree; - return updateTreeVisibility(tree); + return updateTreeVisibility(tree, tracingId); } function getActions(initialState: OxalisState, newState: OxalisState) { @@ -163,7 +162,7 @@ test("compactUpdateActions should compact when toggling all trees", (t) => { ); const [compactedActions] = getActions(allVisible, testState); // Root group should be toggled - t.deepEqual(compactedActions, [updateTreeGroupVisibility(undefined, false)]); + t.deepEqual(compactedActions, [updateTreeGroupVisibility(undefined, false, tracingId)]); }); test("compactUpdateActions should compact when toggling a group", 
(t) => { // Let's toggle group 3 (which contains group 4) @@ -179,7 +178,7 @@ test("compactUpdateActions should compact when toggling a group", (t) => { treeGroups, ); const [compactedActions] = getActions(allVisible, testState); - t.deepEqual(compactedActions, [updateTreeGroupVisibility(3, false)]); + t.deepEqual(compactedActions, [updateTreeGroupVisibility(3, false, tracingId)]); }); test("compactUpdateActions should compact when toggling a group except for one tree", (t) => { // Let's make all trees invisible except for tree 3. Compaction should yield a toggle-root and toggle 3 action @@ -196,7 +195,7 @@ test("compactUpdateActions should compact when toggling a group except for one t ); const [compactedActions] = getActions(allVisible, testState); t.deepEqual(compactedActions, [ - updateTreeGroupVisibility(undefined, false), + updateTreeGroupVisibility(undefined, false, tracingId), _updateTreeVisibility(3, true), ]); }); diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 771fb88b194..ca21bf98201 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -15,16 +15,18 @@ import dummyUser from "test/fixtures/dummy_user"; import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; -const { - createTreeMapFromTreeArray, - generateTreeName, -} = require("oxalis/model/reducers/skeletontracing_reducer_helpers"); +const { createTreeMapFromTreeArray, generateTreeName } = + require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( "oxalis/model/actions/skeletontracing_actions", -); -const { discardSaveQueuesAction } = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions"); +) as typeof import("oxalis/model/actions/skeletontracing_actions"); +const { discardSaveQueuesAction } = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const UpdateActions = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); test.beforeEach(async (t) => { // Setup oxalis, this will execute model.fetch(...) and initialize the store with the tracing, etc. 
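A pattern worth naming, since it repeats through the remaining specs: every layer-scoped update-action creator now takes the target tracing id as its last argument and emits it as `value.actionTracingId`; compare `updateTreeVisibility(tree, tracingId)` and `updateTreeGroupVisibility(groupId, visible, tracingId)` above. A sketch of the shape, using `createEdge` as asserted later in `skeletontracing_saga.spec.ts`:

```ts
// Sketch of a layer-scoped creator after the change: the caller passes the
// tracing id, and it is stamped into the action payload.
type CreateEdgeUpdateAction = {
  name: "createEdge";
  value: { actionTracingId: string; treeId: number; source: number; target: number };
};

function createEdge(
  treeId: number,
  source: number,
  target: number,
  actionTracingId: string,
): CreateEdgeUpdateAction {
  return { name: "createEdge", value: { actionTracingId, treeId, source, target } };
}

// As asserted in the specs:
// createEdge(1, 1, 2, "tracingId") =>
//   { name: "createEdge",
//     value: { actionTracingId: "tracingId", treeId: 1, source: 1, target: 2 } }
```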
@@ -47,7 +49,8 @@ test.serial( "watchTreeNames saga should rename empty trees in tasks and these updates should be persisted", (t) => { const state = Store.getState(); - const treeWithEmptyName = enforceSkeletonTracing(state.tracing).trees[1]; + const skeletonTracing = enforceSkeletonTracing(state.tracing); + const treeWithEmptyName = skeletonTracing.trees[1]; const treeWithCorrectName = update(treeWithEmptyName, { name: { $set: generateTreeName(state, treeWithEmptyName.timestamp, treeWithEmptyName.treeId), @@ -56,9 +59,9 @@ test.serial( const expectedSaveQueue = createSaveQueueFromUpdateActions( [ [ - UpdateActions.updateTree(treeWithCorrectName), + UpdateActions.updateTree(treeWithCorrectName, skeletonTracing.tracingId), UpdateActions.updateSkeletonTracing( - Store.getState().tracing.skeleton, + enforceSkeletonTracing(Store.getState().tracing), [1, 2, 3], [], [0, 0, 0], @@ -67,10 +70,10 @@ test.serial( ], ], TIMESTAMP, - getStats(state.tracing, "skeleton", "irrelevant_in_skeleton_case") || undefined, + getStats(state.tracing) || undefined, ); // Reset the info field which is just for debugging purposes - const actualSaveQueue = state.save.queue.skeleton.map((entry) => { + const actualSaveQueue = state.save.queue.map((entry) => { return { ...omit(entry, "info"), info: "[]" }; }); // Once the updateTree update action is in the save queue, we're good. @@ -81,24 +84,23 @@ test.serial( test.serial("Save actions should not be chunked below the chunk limit (1/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); - const trees = generateDummyTrees(1000, 1); + t.deepEqual(Store.getState().save.queue, []); + // This will create 250 trees with one node each. Thus, 500 update actions will + // be sent to the server (two per node). + const trees = generateDummyTrees(250, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); - t.is(Store.getState().save.queue.skeleton.length, 1); - t.true( - Store.getState().save.queue.skeleton[0].actions.length < - MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton, - ); + t.is(Store.getState().save.queue.length, 1); + t.true(Store.getState().save.queue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH); }); test.serial("Save actions should be chunked above the chunk limit (2/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); - const trees = generateDummyTrees(5000, 1); + t.deepEqual(Store.getState().save.queue, []); + const trees = generateDummyTrees(5000, 2); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); const state = Store.getState(); - t.true(state.save.queue.skeleton.length > 1); - t.is(state.save.queue.skeleton[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + t.true(state.save.queue.length > 1); + t.is(state.save.queue[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH); }); test.serial("Save actions should be chunked after compacting (3/3)", (t) => { @@ -107,14 +109,14 @@ test.serial("Save actions should be chunked after compacting (3/3)", (t) => { const trees = generateDummyTrees(1, nodeCount); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); // Delete some node, NOTE that this is not the node in the middle of the tree! 
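The chunking tests above now compare against a plain number, `MAXIMUM_ACTION_COUNT_PER_BATCH`, where there used to be a per-kind record (`MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton`). The invariant they assert is ordinary batching; a sketch (the constant's value here is an illustrative placeholder, not taken from the diff):

```ts
const MAXIMUM_ACTION_COUNT_PER_BATCH = 5000; // illustrative placeholder

// Split a flat list of update actions into save-queue batches so that no
// batch exceeds the limit; only the last batch may be smaller.
function chunkIntoBatches<T>(actions: T[]): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < actions.length; i += MAXIMUM_ACTION_COUNT_PER_BATCH) {
    batches.push(actions.slice(i, i + MAXIMUM_ACTION_COUNT_PER_BATCH));
  }
  return batches;
}
```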
// The addTreesAndGroupsAction gives new ids to nodes and edges in a non-deterministic way. const middleNodeId = trees[0].nodes[nodeCount / 2].id; Store.dispatch(deleteNodeAction(middleNodeId)); - const { skeleton: skeletonSaveQueue } = Store.getState().save.queue; + const skeletonSaveQueue = Store.getState().save.queue; // There should only be one chunk t.is(skeletonSaveQueue.length, 1); - t.true(skeletonSaveQueue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + t.true(skeletonSaveQueue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH); t.is(skeletonSaveQueue[0].actions[1].name, "moveTreeComponent"); }); diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 4707cf28bee..107d06edcbd 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -3,7 +3,7 @@ import { alert } from "libs/window"; import { setSaveBusyAction } from "oxalis/model/actions/save_actions"; import DiffableMap from "libs/diffable_map"; import compactSaveQueue from "oxalis/model/helpers/compaction/compact_save_queue"; -import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; +import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; import mockRequire from "mock-require"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; @@ -18,17 +18,27 @@ mockRequire("libs/date", DateMock); mockRequire("oxalis/model/sagas/root_saga", function* () { yield; }); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions"); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const { take, call, put } = mockRequire.reRequire("redux-saga/effects"); +const UpdateActions = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); +const SaveActions = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const { take, call, put } = mockRequire.reRequire( + "redux-saga/effects", +) as typeof import("redux-saga/effects"); const { pushSaveQueueAsync, - sendRequestToServer, + sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, sendRequestWithToken, -} = mockRequire.reRequire("oxalis/model/sagas/save_saga"); -const tracingId = "1234567890"; +} = mockRequire.reRequire( + "oxalis/model/sagas/save_saga", +) as typeof import("oxalis/model/sagas/save_saga"); + +const annotationId = "annotation-abcdefgh"; +const tracingId = "tracing-1234567890"; const initialState = { dataset: { dataSource: { @@ -54,6 +64,7 @@ const initialState = { }, annotationType: "Explorational", name: "", + tracingId, activeTreeId: 1, activeNodeId: null, restrictions: { @@ -67,21 +78,23 @@ const initialState = { }; const LAST_VERSION = 2; const TRACINGSTORE_URL = "test.webknossos.xyz"; -const TRACING_TYPE = "skeleton"; test("SaveSaga should compact multiple updateTracing update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, ); t.deepEqual(compactSaveQueue(saveQueue), [saveQueue[1]]); }); test("SaveSaga 
should send update actions", (t) => { - const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; + const updateActions = [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); // setLastSaveTimestampAction @@ -95,55 +108,64 @@ test("SaveSaga should send update actions", (t) => { saga.next({ forcePush: SaveActions.saveNowAction(), }), - put(setSaveBusyAction(true, TRACING_TYPE, tracingId)), + put(setSaveBusyAction(true)), ); saga.next(); // advance to next select state - expectValueDeepEqual(t, saga.next(saveQueue), call(sendRequestToServer, TRACING_TYPE, tracingId)); + expectValueDeepEqual(t, saga.next(saveQueue), call(sendSaveRequestToServer)); saga.next(saveQueue.length); // select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); // Test that loop repeats saga.next(); // select state expectValueDeepEqual(t, saga.next([]), take("PUSH_SAVE_QUEUE_TRANSACTION")); }); + test("SaveSaga should send request to server", (t) => { const saveQueue = createSaveQueueFromUpdateActions( - [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], + [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ], TIMESTAMP, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next(LAST_VERSION); + saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); expectValueDeepEqual( t, saga.next(TRACINGSTORE_URL), - call(sendRequestWithToken, `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, { - method: "POST", - data: saveQueueWithVersions, - compress: false, - showErrorToast: false, - }), + call( + sendRequestWithToken, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: saveQueueWithVersions, + compress: false, + showErrorToast: false, + }, + ), ); }); test("SaveSaga should retry update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( - [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], + [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ], TIMESTAMP, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); const requestWithTokenCall = call( sendRequestWithToken, - `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, { method: "POST", data: saveQueueWithVersions, @@ -151,13 +173,11 @@ test("SaveSaga should retry update actions", (t) => { showErrorToast: false, }, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next(LAST_VERSION); + saga.next(annotationId); 
expectValueDeepEqual(t, saga.next(TRACINGSTORE_URL), requestWithTokenCall); saga.throw("Timeout"); expectValueDeepEqual(t, saga.next("Explorational"), call(toggleErrorHighlighting, true)); @@ -170,27 +190,32 @@ test("SaveSaga should retry update actions", (t) => { }); test("SaveSaga should escalate on permanent client error update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( - [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], + [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ], TIMESTAMP, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next(LAST_VERSION); + saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); expectValueDeepEqual( t, saga.next(TRACINGSTORE_URL), - call(sendRequestWithToken, `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, { - method: "POST", - data: saveQueueWithVersions, - compress: false, - showErrorToast: false, - }), + call( + sendRequestWithToken, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: saveQueueWithVersions, + compress: false, + showErrorToast: false, + }, + ), ); saga.throw({ status: 409, @@ -208,9 +233,12 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => t.throws(() => saga.next()); }); test("SaveSaga should send update actions right away and try to reach a state where all updates are saved", (t) => { - const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; + const updateActions = [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); saga.next(); // select state @@ -224,16 +252,19 @@ test("SaveSaga should send update actions right away and try to reach a state wh saga.next(); // select state - saga.next(saveQueue); // call sendRequestToServer + saga.next(saveQueue); // call sendSaveRequestToServer saga.next(1); // advance to select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { - const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; + const updateActions = [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); saga.next(); // select state @@ -245,115 +276,73 @@ test("SaveSaga should not try to reach state with all actions being saved when s timeout: "a placeholder", }); // put setSaveBusyAction - saga.next(saveQueue); // call sendRequestToServer + 
saga.next(saveQueue); // call sendSaveRequestToServer - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); test("SaveSaga should remove the correct update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next(LAST_VERSION); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(3, TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(2, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(3))); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(2))); }); test("SaveSaga should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.createEdge(1, 0, 1)], - [UpdateActions.createEdge(1, 1, 2)], - [UpdateActions.createEdge(2, 3, 4)], + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + [UpdateActions.createEdge(2, 3, 4, tracingId)], ], TIMESTAMP, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next(LAST_VERSION); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(LAST_VERSION + 3, TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 3))); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); test("SaveSaga should set the correct version numbers if the save queue was compacted", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], - [UpdateActions.updateSkeletonTracing(initialState, [3, 4, 5], [0, 0, 1], 3)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], + 
[UpdateActions.updateSkeletonTracing(initialState.tracing, [3, 4, 5], [], [0, 0, 1], 3)], ], TIMESTAMP, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next(LAST_VERSION); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); // two of the updateTracing update actions are removed by compactSaveQueue - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(LAST_VERSION + 1, TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 1))); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.createEdge(1, 0, 1)], - [UpdateActions.createEdge(1, 1, 2)], - [UpdateActions.createEdge(2, 3, 4)], + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + [UpdateActions.createEdge(2, 3, 4, tracingId)], ], TIMESTAMP, diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index d6c9490cc8b..d125f408ca3 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -1,11 +1,5 @@ import "test/sagas/skeletontracing_saga.mock.js"; -import type { - Flycam, - HybridTracing, - OxalisState, - SaveQueueEntry, - SkeletonTracing, -} from "oxalis/store"; +import type { Flycam, HybridTracing, OxalisState, SkeletonTracing } from "oxalis/store"; import ChainReducer from "test/helpers/chainReducer"; import DiffableMap from "libs/diffable_map"; import EdgeCollection from "oxalis/model/edge_collection"; @@ -27,6 +21,8 @@ import { TreeTypeEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; +import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; const TIMESTAMP = 1494347146379; const DateMock = { @@ -77,12 +73,18 @@ function testDiffing( ); } -function compactSaveQueueWithUpdateActions( - queue: Array, +function createCompactedSaveQueueFromUpdateActions( + updateActions: UpdateActionWithoutIsolationRequirement[][], + timestamp: number, tracing: SkeletonTracing, -): Array { + stats: TracingStats | null = null, +) { return compactSaveQueue( - queue.map((batch) => ({ ...batch, actions: compactUpdateActions(batch.actions, tracing) })), + createSaveQueueFromUpdateActions( + updateActions.map((batch) => compactUpdateActions(batch, tracing)), + timestamp, + stats, + ), ); } @@ -90,7 +92,6 @@ const skeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, tracingId: "tracingId", - version: 0, trees: {}, treeGroups: [], activeGroupId: 
null, @@ -106,6 +107,7 @@ const skeletonTracing: SkeletonTracing = { showSkeletons: true, additionalAxes: [], }; + const serverSkeletonTracing: ServerSkeletonTracing = { ...skeletonTracing, id: skeletonTracing.tracingId, @@ -180,7 +182,6 @@ test("SkeletonTracingSaga shouldn't do anything if unchanged (saga test)", (t) = const saga = setupSavingForTracingType( SkeletonTracingActions.initializeSkeletonTracingAction(serverSkeletonTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.skeleton); @@ -200,7 +201,6 @@ test("SkeletonTracingSaga should do something if changed (saga test)", (t) => { const saga = setupSavingForTracingType( SkeletonTracingActions.initializeSkeletonTracingAction(serverSkeletonTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.skeleton); @@ -213,11 +213,7 @@ test("SkeletonTracingSaga should do something if changed (saga test)", (t) => { saga.next(newState.flycam); const items = execCall(t, saga.next(newState.viewModeData.plane.tdCamera)); t.true(withoutUpdateTracing(items).length > 0); - expectValueDeepEqual( - t, - saga.next(items), - put(pushSaveQueueTransaction(items, "skeleton", serverSkeletonTracing.id)), - ); + expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items))); }); test("SkeletonTracingSaga should emit createNode update actions", (t) => { const newState = SkeletonTracingReducer(initialState, createNodeAction); @@ -249,6 +245,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.like(updateActions[0], { name: "createNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 1, }, @@ -256,6 +253,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.like(updateActions[1], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 1, }, @@ -263,6 +261,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.deepEqual(updateActions[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 2, @@ -281,10 +280,17 @@ test("SkeletonTracingSaga should emit createNode and createTree update actions", initialState.flycam, newState.flycam, ); - t.like(updateActions[0], { name: "createTree", value: { id: 2 } }); + t.like(updateActions[0], { + name: "createTree", + value: { + actionTracingId: "tracingId", + id: 2, + }, + }); t.like(updateActions[1], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 2, }, @@ -292,6 +298,7 @@ test("SkeletonTracingSaga should emit createNode and createTree update actions", t.like(updateActions[2], { name: "createNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 1, }, @@ -314,14 +321,22 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 2, }, }); - t.deepEqual(updateActions[1], { name: "deleteTree", value: { id: 2 } }); + t.deepEqual(updateActions[1], { + name: "deleteTree", + value: { + actionTracingId: "tracingId", + id: 2, + }, + }); t.like(updateActions[2], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 1, }, @@ -329,6 +344,7 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[3], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 2, @@ -347,6 +363,7 @@ 
test("SkeletonTracingSaga should emit a deleteNode update action", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 1, treeId: 1, }, @@ -367,6 +384,7 @@ test("SkeletonTracingSaga should emit a deleteEdge update action", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -374,6 +392,7 @@ test("SkeletonTracingSaga should emit a deleteEdge update action", (t) => { t.deepEqual(updateActions[1], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 2, @@ -389,7 +408,13 @@ test("SkeletonTracingSaga should emit a deleteTree update action", (t) => { testState.flycam, newState.flycam, ); - t.like(updateActions[0], { name: "deleteTree", value: { id: 2 } }); + t.like(updateActions[0], { + name: "deleteTree", + value: { + actionTracingId: "tracingId", + id: 2, + }, + }); }); test("SkeletonTracingSaga should emit an updateNode update action", (t) => { const testState = SkeletonTracingReducer(initialState, createNodeAction); @@ -403,6 +428,7 @@ test("SkeletonTracingSaga should emit an updateNode update action", (t) => { t.like(updateActions[0], { name: "updateNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 1, radius: 12, @@ -435,6 +461,7 @@ test("SkeletonTracingSaga should emit an updateTree update actions (comments)", t.like(updateActions[0], { name: "updateTree", value: { + actionTracingId: "tracingId", id: 1, comments: [ { @@ -471,6 +498,7 @@ test("SkeletonTracingSaga should emit an updateTree update actions (branchpoints t.like(updateActions[0], { name: "updateTree", value: { + actionTracingId: "tracingId", id: 1, branchPoints: [ { @@ -501,6 +529,7 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 1, nodeId: 1, }, @@ -508,12 +537,14 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); t.like(updateActions[2], { name: "createNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 2, }, @@ -521,6 +552,7 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[3], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 3, target: 1, @@ -549,12 +581,14 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.like(updateActions[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); t.like(updateActions[1], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 3, }, @@ -562,12 +596,14 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.like(updateActions[2], { name: "createTree", value: { + actionTracingId: "tracingId", id: 4, }, }); t.like(updateActions[3], { name: "createNode", value: { + actionTracingId: "tracingId", id: 4, treeId: 4, }, @@ -575,6 +611,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[4], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 2, nodeId: 2, }, @@ -582,6 +619,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[5], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 2, nodeId: 3, }, @@ 
-589,6 +627,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[6], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 2, nodeId: 4, }, @@ -596,6 +635,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[7], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 2, target: 3, @@ -604,6 +644,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[8], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 3, target: 4, @@ -612,6 +653,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[9], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 3, target: 1, @@ -635,16 +677,18 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); + const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; // This should result in a moved treeComponent of size three t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -654,6 +698,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); @@ -661,6 +706,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 4, target: 1, @@ -695,16 +741,18 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState2.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing, ); + // This should result in one created node and its edge (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createNode", value: { + actionTracingId: "tracingId", id: 5, treeId: 2, }, @@ -712,6 +760,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.like(simplifiedFirstBatch[1], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 4, target: 5, @@ -723,6 +772,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -732,6 +782,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "deleteTree", 
value: { + actionTracingId: "tracingId", id: 1, }, }); @@ -742,6 +793,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[4], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 5, target: 1, @@ -797,16 +849,17 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { ), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing, ); // This should result in a moved treeComponent of size one (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 1, nodeIds: [4], @@ -816,6 +869,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -823,6 +877,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 4, @@ -841,6 +896,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 1, nodeIds: [5, 6], @@ -849,12 +905,14 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); t.deepEqual(simplifiedThirdBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 6, @@ -879,16 +937,19 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); + // This should result in a new tree const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -896,6 +957,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -905,6 +967,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -937,22 +1000,24 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - 
enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); // This should result in two new trees and two moved treeComponents of size three and two const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -961,12 +1026,14 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.like(simplifiedFirstBatch[2], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); t.deepEqual(simplifiedFirstBatch[3], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 3, nodeIds: [5, 6, 7], @@ -976,6 +1043,7 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.deepEqual(simplifiedFirstBatch[4], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -1009,16 +1077,17 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { updateActions.push( testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState2.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing, ); // This should result in the creation of a new tree (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -1026,6 +1095,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4, 5, 6], @@ -1035,6 +1105,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -1047,6 +1118,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.like(simplifiedSecondBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); @@ -1054,6 +1126,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 3, nodeIds: [5, 6], @@ -1063,6 +1136,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 4, treeId: 2, }, @@ -1096,17 +1170,18 @@ test("compactUpdateActions should do nothing if it cannot compact", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const saveQueueOriginal = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); + const simplifiedUpdateActions = 
createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); // The deleteTree optimization in compactUpdateActions (that is unrelated to this test) // will remove the first deleteNode update action as the first tree is deleted because of the merge, // therefore remove it here as well - saveQueue[0].actions.shift(); + saveQueueOriginal[0].actions.shift(); // Nothing should be changed as the moveTreeComponent update action cannot be inserted - t.deepEqual(simplifiedUpdateActions, saveQueue); + t.deepEqual(simplifiedUpdateActions, saveQueueOriginal); }); test("compactUpdateActions should detect a deleted tree", (t) => { const testState = ChainReducer(initialState) @@ -1125,15 +1200,16 @@ test("compactUpdateActions should detect a deleted tree", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -1157,15 +1233,16 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 2, }, @@ -1173,6 +1250,7 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet t.deepEqual(simplifiedFirstBatch[1], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 3, treeId: 2, }, diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index 206b45c64d3..45f55608aa2 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -56,7 +56,6 @@ const serverVolumeTracing: ServerVolumeTracing = { id: "tracingId", elementClass: "uint32", createdTimestamp: 0, - version: 0, boundingBox: { topLeft: { x: 0, @@ -147,7 +146,6 @@ test("VolumeTracingSaga shouldn't do anything if unchanged (saga test)", (t) => const saga = setupSavingForTracingType( VolumeTracingActions.initializeVolumeTracingAction(serverVolumeTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.volumes[0]); @@ -168,7 +166,6 @@ test("VolumeTracingSaga should do something if changed (saga test)", (t) => { const saga = setupSavingForTracingType( VolumeTracingActions.initializeVolumeTracingAction(serverVolumeTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.volumes[0]); @@ -182,11 +179,7 @@ test("VolumeTracingSaga should do something if changed (saga test)", (t) => { const items = execCall(t, 
saga.next(newState.viewModeData.plane.tdCamera)); t.is(withoutUpdateTracing(items).length, 0); t.true(items[0].value.activeSegmentId === ACTIVE_CELL_ID); - expectValueDeepEqual( - t, - saga.next(items), - put(pushSaveQueueTransaction(items, "volume", volumeTracing.tracingId)), - ); + expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items))); }); test("VolumeTracingSaga should create a volume layer (saga test)", (t) => { diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index 48087c9ed20..f94da6482c7 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -1008,7 +1008,7 @@ Generated by [AVA](https://avajs.dev). url: 'http://localhost:9000', }, datasetId: '570b9f4e4bb848d0885ee711', - description: 'new description', + description: '', id: 'id', isLockedByOwner: false, modified: 'modified', @@ -1250,7 +1250,6 @@ Generated by [AVA](https://avajs.dev). trees: [], typ: 'Skeleton', userBoundingBoxes: [], - version: 0, zoomLevel: 2, } @@ -1317,7 +1316,6 @@ Generated by [AVA](https://avajs.dev). segments: [], typ: 'Volume', userBoundingBoxes: [], - version: 0, volumeBucketDataHasChanged: false, zoomLevel: 1, } @@ -1346,7 +1344,6 @@ Generated by [AVA](https://avajs.dev). trees: [], typ: 'Skeleton', userBoundingBoxes: [], - version: 0, zoomLevel: 2, }, volume: { @@ -1408,7 +1405,6 @@ Generated by [AVA](https://avajs.dev). segments: [], typ: 'Volume', userBoundingBoxes: [], - version: 0, volumeBucketDataHasChanged: false, zoomLevel: 1, }, @@ -1437,7 +1433,6 @@ Generated by [AVA](https://avajs.dev). trees: [], typ: 'Skeleton', userBoundingBoxes: [], - version: 2, zoomLevel: 2, } @@ -1544,7 +1539,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3727, z: 1545, }, @@ -1564,7 +1559,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3727, z: 1545, }, @@ -1584,7 +1579,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3727, z: 1545, }, @@ -1604,7 +1599,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3727, z: 1545, }, @@ -1624,7 +1619,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3727, z: 1545, }, @@ -1705,7 +1700,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -1725,7 +1720,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -1745,7 +1740,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -1765,7 +1760,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -1785,7 +1780,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -1866,7 +1861,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -1886,7 +1881,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -1906,7 +1901,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -1926,7 +1921,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -1946,7 +1941,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -2027,7 +2022,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -2047,7 +2042,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -2067,7 +2062,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -2087,7 +2082,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -2107,7 +2102,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -2188,7 +2183,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -2208,7 +2203,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -2228,7 +2223,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -2248,7 +2243,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -2268,7 +2263,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -2287,7 +2282,6 @@ Generated by [AVA](https://avajs.dev). ], typ: 'Skeleton', userBoundingBoxes: [], - version: 2, zoomLevel: 2, } @@ -2377,7 +2371,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3727, z: 1545, }, @@ -2397,7 +2391,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3727, z: 1545, }, @@ -2417,7 +2411,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3727, z: 1545, }, @@ -2437,7 +2431,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3727, z: 1545, }, @@ -2457,7 +2451,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3727, z: 1545, }, @@ -2538,7 +2532,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -2558,7 +2552,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -2578,7 +2572,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -2598,7 +2592,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -2618,7 +2612,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -2699,7 +2693,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -2719,7 +2713,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -2739,7 +2733,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -2759,7 +2753,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -2779,7 +2773,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -2860,7 +2854,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -2880,7 +2874,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -2900,7 +2894,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -2920,7 +2914,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -2940,7 +2934,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -3039,7 +3033,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -3059,7 +3053,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -3079,7 +3073,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -3099,7 +3093,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -3119,7 +3113,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -3138,6 +3132,5 @@ Generated by [AVA](https://avajs.dev). 
], typ: 'Skeleton', userBoundingBoxes: [], - version: 2, zoomLevel: 2, } diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index 8a0b4fd925d..4ce14317c58 100644 Binary files a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap and b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap differ diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 958651b32ec..9de1d34a80d 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -12,6 +12,7 @@ import type { ServerUpdateAction } from "oxalis/model/sagas/update_actions"; import type { SkeletonTracingStats, TracingStats, + VolumeTracingStats, } from "oxalis/model/accessors/annotation_accessor"; import type { Vector3, @@ -387,7 +388,12 @@ export enum TracingTypeEnum { volume = "volume", hybrid = "hybrid", } -export type TracingType = keyof typeof TracingTypeEnum; +export enum AnnotationLayerEnum { + Skeleton = "Skeleton", + Volume = "Volume", +} +export type TracingType = "skeleton" | "volume" | "hybrid"; +export type AnnotationLayerType = "Skeleton" | "Volume"; export type APITaskType = { readonly id: string; readonly summary: string; @@ -470,12 +476,12 @@ export type APITask = { export type AnnotationLayerDescriptor = { name: string; tracingId: string; - typ: "Skeleton" | "Volume"; - stats: TracingStats | EmptyObject; + typ: AnnotationLayerType; + stats: SkeletonTracingStats | VolumeTracingStats | EmptyObject; }; -export type EditableLayerProperties = Partial<{ +export type EditableLayerProperties = { name: string; -}>; +}; export type APIAnnotationInfo = { readonly annotationLayers: Array<AnnotationLayerDescriptor>; readonly datasetId: string; @@ -487,7 +493,7 @@ export type APIAnnotationInfo = { readonly name: string; // Not used by the front-end anymore, but the // backend still serves this for backward-compatibility reasons. - readonly stats?: SkeletonTracingStats | EmptyObject; + readonly stats?: TracingStats | EmptyObject | null | undefined; readonly state: string; readonly isLockedByOwner: boolean; readonly tags: Array<string>; @@ -574,8 +580,21 @@ export type APITimeTrackingPerAnnotation = { task: string | undefined; projectName: string | undefined; timeMillis: number; - annotationLayerStats: Array<TracingStats>; + annotationLayerStats: TracingStats; +}; +type APITracingStoreAnnotationLayer = { + readonly tracingId: string; + readonly name: string; + readonly typ: AnnotationLayerType; +}; + +export type APITracingStoreAnnotation = { + readonly description: string; + readonly version: number; + readonly earliestAccessibleVersion: number; + readonly annotationLayers: APITracingStoreAnnotationLayer[]; +}; + export type APITimeTrackingPerUser = { user: APIUserCompact & { email: string; @@ -840,9 +859,12 @@ export type ServerTracingBase = { editPositionAdditionalCoordinates: AdditionalCoordinate[] | null; editRotation: Point3; error?: string; - version: number; zoomLevel: number; additionalAxes: ServerAdditionalAxis[]; + // The backend sends the version property, but the front-end should + // not care about it. To ensure this, parseProtoTracing will remove + // the property.
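+ // A minimal sketch of that stripping, assuming parseProtoTracing holds the
+ // parsed object (names here are illustrative, not the actual implementation):
+ //   const { version: _ignored, ...tracingWithoutVersion } = parsedTracing;
+ //   return tracingWithoutVersion;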
+ version?: number; }; export type ServerSkeletonTracing = ServerTracingBase & { // The following property is added when fetching the @@ -883,12 +905,11 @@ export type ServerVolumeTracing = ServerTracingBase & { export type ServerTracing = ServerSkeletonTracing | ServerVolumeTracing; export type ServerEditableMapping = { createdTimestamp: number; - version: number; - mappingName: string; baseMappingName: string; // The id of the volume tracing the editable mapping belongs to tracingId: string; }; + export type APIMeshFile = { meshFileName: string; mappingName?: string | null | undefined; diff --git a/test/backend/Dummies.scala b/test/backend/Dummies.scala index 8aaf33e4b0f..a66085396b7 100644 --- a/test/backend/Dummies.scala +++ b/test/backend/Dummies.scala @@ -61,6 +61,8 @@ object Dummies { Some(true)) val treeGroup2: TreeGroup = TreeGroup("Axon 2", 2, Seq.empty, Some(true)) + val tracingId: String = "dummyTracingId" + val skeletonTracing: SkeletonTracing = SkeletonTracing( "dummy_dataset", Seq(tree1, tree2), diff --git a/test/backend/SkeletonUpdateActionsUnitTestSuite.scala b/test/backend/SkeletonUpdateActionsUnitTestSuite.scala index 8d12fe3331d..7c6b191b9f6 100644 --- a/test/backend/SkeletonUpdateActionsUnitTestSuite.scala +++ b/test/backend/SkeletonUpdateActionsUnitTestSuite.scala @@ -9,7 +9,7 @@ import org.scalatestplus.play._ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { - private def applyUpdateAction(action: UpdateAction.SkeletonUpdateAction): SkeletonTracing = + private def applyUpdateAction(action: SkeletonUpdateAction): SkeletonTracing = action.applyOn(Dummies.skeletonTracing) def listConsistsOfLists[T](joinedList: Seq[T], sublist1: Seq[T], sublist2: Seq[T]): Boolean = @@ -30,7 +30,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { comments = List[UpdateActionComment](), groupId = None, isVisible = Option(true), - edgesAreVisible = Option(true) + edgesAreVisible = Option(true), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createTreeAction) @@ -47,7 +48,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "DeleteTreeSkeletonAction" should { "delete the specified tree" in { - val deleteTreeAction = new DeleteTreeSkeletonAction(id = 1) + val deleteTreeAction = new DeleteTreeSkeletonAction(id = 1, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(deleteTreeAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length - 1) @@ -70,7 +71,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { groupId = None, metadata = Some( List(MetadataEntry("myKey", numberValue = Some(5.0)), - MetadataEntry("anotherKey", stringListValue = Some(Seq("hello", "there"))))) + MetadataEntry("anotherKey", stringListValue = Some(Seq("hello", "there"))))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeAction) @@ -88,7 +90,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "MergeTreeSkeletonAction" should { "merge the specified trees" in { - val mergeTreeAction = new MergeTreeSkeletonAction(sourceId = 1, targetId = 2) + val mergeTreeAction = new MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId) val sourceTree = Dummies.tree1 val targetTree = Dummies.tree2 val result = applyUpdateAction(mergeTreeAction) @@ -109,7 +111,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "MoveTreeComponentSkeletonAction" should { "move the specified (separate) nodes" in { val moveTreeComponentSkeletonAction = - new 
MoveTreeComponentSkeletonAction(Dummies.comp1Nodes.map(_.id).toList, sourceId = 3, targetId = 4) + new MoveTreeComponentSkeletonAction(Dummies.comp1Nodes.map(_.id).toList, + sourceId = 3, + targetId = 4, + actionTracingId = Dummies.tracingId) val result = moveTreeComponentSkeletonAction.applyOn(Dummies.componentSkeletonTracing) assert(result.trees.length == Dummies.componentSkeletonTracing.trees.length) @@ -127,7 +132,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "CreateEdgeSkeletonAction" should { "create a new edge in the right tree" in { - val createEdgeSkeletonAction = new CreateEdgeSkeletonAction(source = 1, target = 7, treeId = 1) + val createEdgeSkeletonAction = + new CreateEdgeSkeletonAction(source = 1, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(createEdgeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -140,8 +146,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "DeleteEdgeSkeletonAction" should { "undo CreateEdgeSkeletonAction" in { - val createEdgeSkeletonAction = new CreateEdgeSkeletonAction(source = 0, target = 7, treeId = 1) - val deleteEdgeSkeletonAction = new DeleteEdgeSkeletonAction(source = 0, target = 7, treeId = 1) + val createEdgeSkeletonAction = + new CreateEdgeSkeletonAction(source = 0, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) + val deleteEdgeSkeletonAction = + new DeleteEdgeSkeletonAction(source = 0, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) val result = deleteEdgeSkeletonAction.applyOn(createEdgeSkeletonAction.applyOn(Dummies.skeletonTracing)) assert(result == Dummies.skeletonTracing) } @@ -161,7 +169,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + None, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createNodeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -186,7 +195,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateNodeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -211,9 +220,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + actionTracingId = Dummies.tracingId ) - val deleteNodeSkeletonAction = new DeleteNodeSkeletonAction(newNode.id, treeId = 1) + val deleteNodeSkeletonAction = + new DeleteNodeSkeletonAction(newNode.id, treeId = 1, actionTracingId = Dummies.tracingId) val result = deleteNodeSkeletonAction.applyOn(createNodeSkeletonAction.applyOn(Dummies.skeletonTracing)) assert(result == Dummies.skeletonTracing) } @@ -223,7 +233,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "update a top level tree group" in { val updatedName = "Axon 2 updated" val updateTreeGroupsSkeletonAction = new UpdateTreeGroupsSkeletonAction( - List(UpdateActionTreeGroup(updatedName, 2, Some(true), List())) + List(UpdateActionTreeGroup(updatedName, 2, Some(true), List())), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) @@ -238,7 +249,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { UpdateActionTreeGroup(updatedNameTop, 1, Some(true), - 
List(UpdateActionTreeGroup(updatedNameNested, 3, Some(false), List())))) + List(UpdateActionTreeGroup(updatedNameNested, 3, Some(false), List())))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) @@ -261,7 +273,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { editPosition, editRotation, zoomLevel, - userBoundingBox + userBoundingBox, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala new file mode 100644 index 00000000000..3012f03159d --- /dev/null +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -0,0 +1,86 @@ +package backend + +import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionAnnotationAction, UpdateGroupHandling} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.MergeTreeSkeletonAction +import org.scalatestplus.play.PlaySpec + +class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling { + + "regroup" should { + "work" in { + val updateGroupsBefore = List( + (5L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + (6L, + List( + RevertToVersionAnnotationAction(sourceVersion = 1) + )), + (7L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + (8L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )) + ) + val res = regroupByIsolationSensitiveActions(updateGroupsBefore) + assert(res.length == 3) + assert(res(1)._2.length == 1) + assert(res(1)._1 == 6L) + } + } + + "ironOutReverts" should { + "work" in { + val updateGroupsBefore = List( + (6L, + List( + MergeTreeSkeletonAction(sourceId = 7, targetId = 7, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 8, targetId = 8, actionTracingId = Dummies.tracingId) + )), + (5L, + List( + RevertToVersionAnnotationAction(sourceVersion = 2) + )), + (4L, + List( + // Should be dropped, since we jump from 5 to 2 + RevertToVersionAnnotationAction(sourceVersion = 1) + )), + (3L, + List( + // Should be dropped, since we jump from 5 to 2 + MergeTreeSkeletonAction(sourceId = 5, targetId = 5, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 6, targetId = 6, actionTracingId = Dummies.tracingId) + )), + (2L, + List( + MergeTreeSkeletonAction(sourceId = 3, targetId = 3, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 4, targetId = 4, actionTracingId = Dummies.tracingId) + )), + (1L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 1, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 2, actionTracingId = Dummies.tracingId) + )) + ) + + val res = ironOutReverts(updateGroupsBefore) + assert(res.length == 6) + assert( + res.headOption.contains( + MergeTreeSkeletonAction(sourceId = 1, targetId = 1, actionTracingId = Dummies.tracingId))) + assert( + res.lastOption.contains( + 
MergeTreeSkeletonAction(sourceId = 8, targetId = 8, actionTracingId = Dummies.tracingId))) + assert(!res.contains(MergeTreeSkeletonAction(sourceId = 6, targetId = 6, actionTracingId = Dummies.tracingId))) + } + } + +} diff --git a/test/backend/VolumeBucketKeyTestSuite.scala b/test/backend/VolumeBucketKeyTestSuite.scala index 48db63a93f3..0e2d0a76f8b 100644 --- a/test/backend/VolumeBucketKeyTestSuite.scala +++ b/test/backend/VolumeBucketKeyTestSuite.scala @@ -26,14 +26,14 @@ class VolumeBucketKeyTestSuite extends PlaySpec { val bucketPos = BucketPosition(32, 64, 96, Vec3Int(1, 1, 1), None) "match defined bucket key" in { val key = bucketKeyBuilder.build(layerName, bucketPos) - assert(key == s"$layerName/1/53-[1,2,3]") + assert(key == s"$layerName/1/[1,2,3]") } "expands mag when anisotropic" in { val key = bucketKeyBuilder.build(layerName, BucketPosition(32, 64, 96, Vec3Int(4, 4, 1), None)) - assert(key == s"$layerName/4-4-1/36-[0,0,3]") + assert(key == s"$layerName/4-4-1/[0,0,3]") } "is parsed as the same bucket position" in { - bucketKeyBuilder.parse(s"$layerName/1/53-[1,2,3]", None) match { + bucketKeyBuilder.parse(s"$layerName/1/[1,2,3]", None) match { case Some((layer, parsedPos)) => assert(layer == layerName) assert(parsedPos == bucketPos) @@ -56,10 +56,10 @@ class VolumeBucketKeyTestSuite extends PlaySpec { bucketPos, Some(additionalAxes) ) - assert(key == s"$layerName/1/53-[4,5][1,2,3]") + assert(key == s"$layerName/1/[4,5][1,2,3]") } "is parsed as the same bucket position" in { - bucketKeyBuilder.parse(s"$layerName/1/53-[4,5][1,2,3]", Some(additionalAxes)) match { + bucketKeyBuilder.parse(s"$layerName/1/[4,5][1,2,3]", Some(additionalAxes)) match { case Some((layer, parsedPos)) => assert(layer == layerName) assert(parsedPos == bucketPos) @@ -73,7 +73,7 @@ class VolumeBucketKeyTestSuite extends PlaySpec { BucketPosition(32, 64, 96, Vec3Int(1, 1, 1), Some(additionalCoordinates.reverse)), Some(additionalAxes) ) - assert(key == s"$layerName/1/53-[4,5][1,2,3]") + assert(key == s"$layerName/1/[4,5][1,2,3]") } } } diff --git a/test/backend/VolumeUpdateActionsUnitTestSuite.scala b/test/backend/VolumeUpdateActionsUnitTestSuite.scala index 91459fc614b..35dd3f9b0b4 100644 --- a/test/backend/VolumeUpdateActionsUnitTestSuite.scala +++ b/test/backend/VolumeUpdateActionsUnitTestSuite.scala @@ -3,8 +3,8 @@ package backend import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ApplyableVolumeUpdateAction, CreateSegmentVolumeAction, DeleteSegmentVolumeAction, UpdateActionSegmentGroup, @@ -15,7 +15,7 @@ import org.scalatestplus.play._ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplicits { - private def applyUpdateAction(action: UpdateAction.VolumeUpdateAction): VolumeTracing = + private def applyUpdateAction(action: ApplyableVolumeUpdateAction): VolumeTracing = action.applyOn(Dummies.volumeTracing) "CreateSegmentVolumeAction" should { @@ -26,7 +26,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic color = None, name = Some("aSegment"), groupId = Some(1), - creationTime = Some(Dummies.timestampLong) + creationTime = Some(Dummies.timestampLong), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createSegmentAction) @@ -39,7 +40,7 @@ class 
VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic "DeleteSegmentVolumeAction" should { "delete the specified segment" in { - val deleteSegmentAction = DeleteSegmentVolumeAction(id = 5) + val deleteSegmentAction = DeleteSegmentVolumeAction(id = 5, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(deleteSegmentAction) assert(result.segments.length == Dummies.volumeTracing.segments.length - 1) @@ -58,7 +59,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic name = Some("aRenamedSegment"), color = None, creationTime = Some(Dummies.timestampLong), - groupId = None + groupId = None, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentAction) @@ -76,7 +78,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic "update a top level segment group" in { val updatedName = "Segment Group 2 updated" val updateSegmentGroupsVolumeAction = new UpdateSegmentGroupsVolumeAction( - List(UpdateActionSegmentGroup(updatedName, 2, isExpanded = Some(true), List())) + List(UpdateActionSegmentGroup(updatedName, 2, isExpanded = Some(true), List())), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentGroupsVolumeAction) assert(result.segments == Dummies.volumeTracing.segments) @@ -87,7 +90,13 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic val updatedNameTop = "Segment Group 1 updated" val updatedNameNested = "Segment Group 3 updated" val updateSegmentGroupsVolumeAction = new UpdateSegmentGroupsVolumeAction( - List(UpdateActionSegmentGroup(updatedNameTop, 1, isExpanded = Some(true), List(UpdateActionSegmentGroup(updatedNameNested, 3, isExpanded = Some(false), List())))) + List( + UpdateActionSegmentGroup( + updatedNameTop, + 1, + isExpanded = Some(true), + List(UpdateActionSegmentGroup(updatedNameNested, 3, isExpanded = Some(false), List())))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentGroupsVolumeAction) assert(result.segments == Dummies.volumeTracing.segments) diff --git a/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py b/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py index 91d2140c7b3..d7b553b51d0 100644 --- a/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py +++ b/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: SegmentToAgglomerateProto.proto +# source: SegmentToAgglomerateChunkProto.proto """Generated protocol buffer code.""" from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database @@ -13,15 +13,15 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fSegmentToAgglomerateProto.proto\x12&com.scalableminds.webknossos.datastore\"B\n\x16SegmentAgglomeratePair\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12\x15\n\ragglomerateId\x18\x02 \x02(\x03\"y\n\x19SegmentToAgglomerateProto\x12\\\n\x14segmentToAgglomerate\x18\x01 \x03(\x0b\x32>.com.scalableminds.webknossos.datastore.SegmentAgglomeratePair') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x24SegmentToAgglomerateChunkProto.proto\x12&com.scalableminds.webknossos.datastore\"B\n\x16SegmentAgglomeratePair\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12\x15\n\ragglomerateId\x18\x02 \x02(\x03\"~\n\x1eSegmentToAgglomerateChunkProto\x12\\\n\x14segmentToAgglomerate\x18\x01 \x03(\x0b\x32>.com.scalableminds.webknossos.datastore.SegmentAgglomeratePair') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SegmentToAgglomerateProto_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SegmentToAgglomerateChunkProto_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _SEGMENTAGGLOMERATEPAIR._serialized_start=75 _SEGMENTAGGLOMERATEPAIR._serialized_end=141 - _SEGMENTTOAGGLOMERATEPROTO._serialized_start=143 - _SEGMENTTOAGGLOMERATEPROTO._serialized_end=264 + _SEGMENTTOAGGLOMERATECHUNKPROTO._serialized_start=148 + _SEGMENTTOAGGLOMERATECHUNKPROTO._serialized_end=274 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/.gitignore b/tools/migration-unified-annotation-versioning/.gitignore new file mode 100644 index 00000000000..f18dc3a725d --- /dev/null +++ b/tools/migration-unified-annotation-versioning/.gitignore @@ -0,0 +1,5 @@ +counts.py +*.log +*.csv +logs/ +*.dat diff --git a/tools/migration-unified-annotation-versioning/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py new file mode 100644 index 00000000000..2bae792f46a --- /dev/null +++ b/tools/migration-unified-annotation-versioning/Annotation_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
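+# For readability: the serialized descriptor below corresponds to the following
+# proto2 schema (hand-reconstructed from the bytes; Annotation.proto in the
+# webknossos repository is the authoritative definition):
+#
+#   message AnnotationProto {
+#     required string description = 1;
+#     required int64 version = 2;
+#     repeated AnnotationLayerProto annotationLayers = 3;
+#     required int64 earliestAccessibleVersion = 4;
+#     optional bool skeletonMayHavePendingUpdates = 5;
+#     optional bool editableMappingsMayHavePendingUpdates = 6;
+#   }
+#
+#   message AnnotationLayerProto {
+#     required string tracingId = 1;
+#     required string name = 2;
+#     required AnnotationLayerTypeProto typ = 3;
+#   }
+#
+#   enum AnnotationLayerTypeProto { Skeleton = 1; Volume = 2; }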
+# source: Annotation.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\x88\x02\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\x12-\n%editableMappingsMayHavePendingUpdates\x18\x06 \x01(\x08\"\x86\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12M\n\x03typ\x18\x03 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _ANNOTATIONLAYERTYPEPROTO._serialized_start=464 + _ANNOTATIONLAYERTYPEPROTO._serialized_end=516 + _ANNOTATIONPROTO._serialized_start=61 + _ANNOTATIONPROTO._serialized_end=325 + _ANNOTATIONLAYERPROTO._serialized_start=328 + _ANNOTATIONLAYERPROTO._serialized_end=462 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py b/tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py new file mode 100644 index 00000000000..44b93186fa6 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: MetadataEntry.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13MetadataEntry.proto\x12&com.scalableminds.webknossos.datastore\"w\n\x12MetadataEntryProto\x12\x0b\n\x03key\x18\x01 \x02(\t\x12\x13\n\x0bstringValue\x18\x02 \x01(\t\x12\x11\n\tboolValue\x18\x03 \x01(\x08\x12\x13\n\x0bnumberValue\x18\x04 \x01(\x01\x12\x17\n\x0fstringListValue\x18\x05 \x03(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'MetadataEntry_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _METADATAENTRYPROTO._serialized_start=63 + _METADATAENTRYPROTO._serialized_end=182 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/SkeletonTracing_pb2.py b/tools/migration-unified-annotation-versioning/SkeletonTracing_pb2.py new file mode 100644 index 00000000000..daac9b940f1 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/SkeletonTracing_pb2.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: SkeletonTracing.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import geometry_pb2 as geometry__pb2 +import MetadataEntry_pb2 as MetadataEntry__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15SkeletonTracing.proto\x12&com.scalableminds.webknossos.datastore\x1a\x0egeometry.proto\x1a\x13MetadataEntry.proto\"\xf9\x02\n\x04Node\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x46\n\x08position\x18\x02 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12I\n\x08rotation\x18\x03 \x02(\x0b\x32\x37.com.scalableminds.webknossos.datastore.Vec3DoubleProto\x12\x0e\n\x06radius\x18\x04 \x02(\x02\x12\x10\n\x08viewport\x18\x05 \x02(\x05\x12\x0b\n\x03mag\x18\x06 \x02(\x05\x12\x10\n\x08\x62itDepth\x18\x07 \x02(\x05\x12\x15\n\rinterpolation\x18\x08 \x02(\x08\x12\x18\n\x10\x63reatedTimestamp\x18\t \x02(\x03\x12`\n\x15\x61\x64\x64itionalCoordinates\x18\n \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\"&\n\x04\x45\x64ge\x12\x0e\n\x06source\x18\x01 \x02(\x05\x12\x0e\n\x06target\x18\x02 \x02(\x05\"*\n\x07\x43omment\x12\x0e\n\x06nodeId\x18\x01 \x02(\x05\x12\x0f\n\x07\x63ontent\x18\x02 \x02(\t\"7\n\x0b\x42ranchPoint\x12\x0e\n\x06nodeId\x18\x01 \x02(\x05\x12\x18\n\x10\x63reatedTimestamp\x18\x02 \x02(\x03\"\xd9\x04\n\x04Tree\x12\x0e\n\x06treeId\x18\x01 \x02(\x05\x12;\n\x05nodes\x18\x02 \x03(\x0b\x32,.com.scalableminds.webknossos.datastore.Node\x12;\n\x05\x65\x64ges\x18\x03 \x03(\x0b\x32,.com.scalableminds.webknossos.datastore.Edge\x12\x41\n\x05\x63olor\x18\x04 \x01(\x0b\x32\x32.com.scalableminds.webknossos.datastore.ColorProto\x12I\n\x0c\x62ranchPoints\x18\x05 
\x03(\x0b\x32\x33.com.scalableminds.webknossos.datastore.BranchPoint\x12\x41\n\x08\x63omments\x18\x06 \x03(\x0b\x32/.com.scalableminds.webknossos.datastore.Comment\x12\x0c\n\x04name\x18\x07 \x02(\t\x12\x18\n\x10\x63reatedTimestamp\x18\x08 \x02(\x03\x12\x0f\n\x07groupId\x18\t \x01(\x05\x12\x11\n\tisVisible\x18\n \x01(\x08\x12\x43\n\x04type\x18\x0b \x01(\x0e\x32\x35.com.scalableminds.webknossos.datastore.TreeTypeProto\x12\x17\n\x0f\x65\x64gesAreVisible\x18\x0c \x01(\x08\x12L\n\x08metadata\x18\r \x03(\x0b\x32:.com.scalableminds.webknossos.datastore.MetadataEntryProto\"\x83\x01\n\tTreeGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07groupId\x18\x02 \x02(\x05\x12\x43\n\x08\x63hildren\x18\x03 \x03(\x0b\x32\x31.com.scalableminds.webknossos.datastore.TreeGroup\x12\x12\n\nisExpanded\x18\x04 \x01(\x08\"\xf0\x06\n\x0fSkeletonTracing\x12\x13\n\x0b\x64\x61tasetName\x18\x01 \x02(\t\x12;\n\x05trees\x18\x02 \x03(\x0b\x32,.com.scalableminds.webknossos.datastore.Tree\x12\x18\n\x10\x63reatedTimestamp\x18\x03 \x02(\x03\x12M\n\x0b\x62oundingBox\x18\x04 \x01(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12\x14\n\x0c\x61\x63tiveNodeId\x18\x05 \x01(\x05\x12J\n\x0c\x65\x64itPosition\x18\x06 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12M\n\x0c\x65\x64itRotation\x18\x07 \x02(\x0b\x32\x37.com.scalableminds.webknossos.datastore.Vec3DoubleProto\x12\x11\n\tzoomLevel\x18\x08 \x02(\x01\x12\x0f\n\x07version\x18\t \x02(\x03\x12Q\n\x0fuserBoundingBox\x18\n \x01(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12\x45\n\ntreeGroups\x18\x0b \x03(\x0b\x32\x31.com.scalableminds.webknossos.datastore.TreeGroup\x12X\n\x11userBoundingBoxes\x18\x0c \x03(\x0b\x32=.com.scalableminds.webknossos.datastore.NamedBoundingBoxProto\x12\x16\n\x0eorganizationId\x18\r \x01(\t\x12l\n!editPositionAdditionalCoordinates\x18\x15 \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\x12S\n\x0e\x61\x64\x64itionalAxes\x18\x16 \x03(\x0b\x32;.com.scalableminds.webknossos.datastore.AdditionalAxisProto\"^\n\x12SkeletonTracingOpt\x12H\n\x07tracing\x18\x01 \x01(\x0b\x32\x37.com.scalableminds.webknossos.datastore.SkeletonTracing\"`\n\x10SkeletonTracings\x12L\n\x08tracings\x18\x01 \x03(\x0b\x32:.com.scalableminds.webknossos.datastore.SkeletonTracingOpt*-\n\rTreeTypeProto\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x0f\n\x0b\x41GGLOMERATE\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SkeletonTracing_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TREETYPEPROTO._serialized_start=2438 + _TREETYPEPROTO._serialized_end=2483 + _NODE._serialized_start=103 + _NODE._serialized_end=480 + _EDGE._serialized_start=482 + _EDGE._serialized_end=520 + _COMMENT._serialized_start=522 + _COMMENT._serialized_end=564 + _BRANCHPOINT._serialized_start=566 + _BRANCHPOINT._serialized_end=621 + _TREE._serialized_start=624 + _TREE._serialized_end=1225 + _TREEGROUP._serialized_start=1228 + _TREEGROUP._serialized_end=1359 + _SKELETONTRACING._serialized_start=1362 + _SKELETONTRACING._serialized_end=2242 + _SKELETONTRACINGOPT._serialized_start=2244 + _SKELETONTRACINGOPT._serialized_end=2338 + _SKELETONTRACINGS._serialized_start=2340 + _SKELETONTRACINGS._serialized_end=2436 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/VolumeTracing_pb2.py 
b/tools/migration-unified-annotation-versioning/VolumeTracing_pb2.py new file mode 100644 index 00000000000..1c917e7cf49 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/VolumeTracing_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: VolumeTracing.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import geometry_pb2 as geometry__pb2 +import MetadataEntry_pb2 as MetadataEntry__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13VolumeTracing.proto\x12&com.scalableminds.webknossos.datastore\x1a\x0egeometry.proto\x1a\x13MetadataEntry.proto\"\xa0\x03\n\x07Segment\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12L\n\x0e\x61nchorPosition\x18\x02 \x01(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x14\n\x0c\x63reationTime\x18\x04 \x01(\x03\x12\x41\n\x05\x63olor\x18\x05 \x01(\x0b\x32\x32.com.scalableminds.webknossos.datastore.ColorProto\x12\x0f\n\x07groupId\x18\x06 \x01(\x05\x12n\n#anchorPositionAdditionalCoordinates\x18\x07 \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\x12L\n\x08metadata\x18\x0b \x03(\x0b\x32:.com.scalableminds.webknossos.datastore.MetadataEntryProto\"\x84\n\n\rVolumeTracing\x12\x17\n\x0f\x61\x63tiveSegmentId\x18\x01 \x01(\x03\x12M\n\x0b\x62oundingBox\x18\x02 \x02(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12\x18\n\x10\x63reatedTimestamp\x18\x03 \x02(\x03\x12\x13\n\x0b\x64\x61tasetName\x18\x04 \x02(\t\x12J\n\x0c\x65\x64itPosition\x18\x05 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12M\n\x0c\x65\x64itRotation\x18\x06 \x02(\x0b\x32\x37.com.scalableminds.webknossos.datastore.Vec3DoubleProto\x12]\n\x0c\x65lementClass\x18\x07 \x02(\x0e\x32G.com.scalableminds.webknossos.datastore.VolumeTracing.ElementClassProto\x12\x15\n\rfallbackLayer\x18\x08 \x01(\t\x12\x18\n\x10largestSegmentId\x18\t \x01(\x03\x12\x0f\n\x07version\x18\n \x02(\x03\x12\x11\n\tzoomLevel\x18\x0b \x02(\x01\x12Q\n\x0fuserBoundingBox\x18\x0c \x01(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12X\n\x11userBoundingBoxes\x18\r \x03(\x0b\x32=.com.scalableminds.webknossos.datastore.NamedBoundingBoxProto\x12\x16\n\x0eorganizationId\x18\x0e \x01(\t\x12\x42\n\x04mags\x18\x0f \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12\x41\n\x08segments\x18\x10 \x03(\x0b\x32/.com.scalableminds.webknossos.datastore.Segment\x12\x13\n\x0bmappingName\x18\x11 \x01(\t\x12\x1a\n\x12hasEditableMapping\x18\x12 \x01(\x08\x12K\n\rsegmentGroups\x18\x13 \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.SegmentGroup\x12\x17\n\x0fhasSegmentIndex\x18\x14 \x01(\x08\x12l\n!editPositionAdditionalCoordinates\x18\x15 \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\x12S\n\x0e\x61\x64\x64itionalAxes\x18\x16 \x03(\x0b\x32;.com.scalableminds.webknossos.datastore.AdditionalAxisProto\x12\x17\n\x0fmappingIsLocked\x18\x17 
\x01(\x08\"N\n\x11\x45lementClassProto\x12\t\n\x05uint8\x10\x01\x12\n\n\x06uint16\x10\x02\x12\n\n\x06uint24\x10\x03\x12\n\n\x06uint32\x10\x04\x12\n\n\x06uint64\x10\x08\"\x89\x01\n\x0cSegmentGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07groupId\x18\x02 \x02(\x05\x12\x46\n\x08\x63hildren\x18\x03 \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.SegmentGroup\x12\x12\n\nisExpanded\x18\x04 \x01(\x08\"Z\n\x10VolumeTracingOpt\x12\x46\n\x07tracing\x18\x01 \x01(\x0b\x32\x35.com.scalableminds.webknossos.datastore.VolumeTracing\"\\\n\x0eVolumeTracings\x12J\n\x08tracings\x18\x01 \x03(\x0b\x32\x38.com.scalableminds.webknossos.datastore.VolumeTracingOpt') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'VolumeTracing_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _SEGMENT._serialized_start=101 + _SEGMENT._serialized_end=517 + _VOLUMETRACING._serialized_start=520 + _VOLUMETRACING._serialized_end=1804 + _VOLUMETRACING_ELEMENTCLASSPROTO._serialized_start=1726 + _VOLUMETRACING_ELEMENTCLASSPROTO._serialized_end=1804 + _SEGMENTGROUP._serialized_start=1807 + _SEGMENTGROUP._serialized_end=1944 + _VOLUMETRACINGOPT._serialized_start=1946 + _VOLUMETRACINGOPT._serialized_end=2036 + _VOLUMETRACINGS._serialized_start=2038 + _VOLUMETRACINGS._serialized_end=2130 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/connections.py b/tools/migration-unified-annotation-versioning/connections.py new file mode 100644 index 00000000000..5aa4b884317 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -0,0 +1,52 @@ +import grpc +import psycopg2 +import psycopg2.extras +import logging +import re +from typing import Dict, Any +import os + +import fossildbapi_pb2 as proto +import fossildbapi_pb2_grpc as proto_rpc + +logger = logging.getLogger(__name__) + + +def connect_to_fossildb(host: str, label: str): + max_message_length = 2147483647 # 2G + channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) + stub = proto_rpc.FossilDBStub(channel) + test_fossildb_health(stub, f"{label} FossilDB at {host}") + return stub + + +def test_fossildb_health(stub, label): + reply = stub.Health(proto.HealthRequest()) + assert_grpc_success(reply) + logger.info(f"Successfully connected to {label}") + + +def assert_grpc_success(reply): + if not reply.success: + raise Exception("reply.success failed: " + reply.errorMessage) + + +def connect_to_postgres(postgres_config: str): + parsed = parse_connection_string(postgres_config) + password = os.environ.get("PG_PASSWORD", "postgres") + return psycopg2.connect(host=parsed["host"], port=parsed["port"], database=parsed["database"], user=parsed["user"], password=password) + + +def parse_connection_string(connection_string: str) -> Dict[str, Any]: + pattern = r"^(?P<user>\w+)@(?!.*@)(?P<host>[^:/]+)(?::(?P<port>\d+))?(?P<database>/[^ ]*)?$" + + match = re.match(pattern, connection_string.removeprefix("postgresql://")) + if match: + return { + "user": match.group("user"), + "host": match.group("host"), + "port": int(match.group("port")), + "database": match.group("database").lstrip("/") + } + else: + raise ValueError("Invalid postgres connection string, needs to be postgresql://user@host:port/database.") diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py
b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py new file mode 100644 index 00000000000..6267f9144b6 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: fossildbapi.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66ossildbapi.proto\x12 com.scalableminds.fossildb.proto\"\x0f\n\rHealthRequest\"4\n\x0bHealthReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"R\n\nGetRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\x12\n\nmayBeEmpty\x18\x04 \x01(\x08\"W\n\x08GetReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x02(\x0c\x12\x15\n\ractualVersion\x18\x04 \x02(\x04\"M\n\nPutRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\r\n\x05value\x18\x04 \x02(\x0c\"1\n\x08PutReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"_\n\x1aPutMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\x12\x0e\n\x06values\x18\x04 \x03(\x0c\"A\n\x18PutMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"A\n\rDeleteRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x02(\x04\"4\n\x0b\x44\x65leteReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\">\n\x18\x44\x65leteAllByPrefixRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0e\n\x06prefix\x18\x02 \x02(\t\"?\n\x16\x44\x65leteAllByPrefixReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"k\n\x1aGetMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"c\n\x18GetMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\x0c\x12\x10\n\x08versions\x18\x04 \x03(\x04\"s\n\x16GetMultipleKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x15\n\rstartAfterKey\x18\x02 \x01(\t\x12\x0e\n\x06prefix\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x04\x12\r\n\x05limit\x18\x05 \x01(\r\"s\n\x14GetMultipleKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\x12\x0e\n\x06values\x18\x04 \x03(\x0c\x12\x16\n\x0e\x61\x63tualVersions\x18\x05 \x03(\x04\"n\n\x1d\x44\x65leteMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"D\n\x1b\x44\x65leteMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 
\x01(\t\"K\n\x0fListKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\r\n\x05limit\x18\x02 \x01(\r\x12\x15\n\rstartAfterKey\x18\x03 \x01(\t\"D\n\rListKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\"U\n\x13ListVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\r\n\x05limit\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"L\n\x11ListVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\"\x0f\n\rBackupRequest\"a\n\x0b\x42\x61\x63kupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x02(\r\x12\x11\n\ttimestamp\x18\x04 \x02(\x04\x12\x0c\n\x04size\x18\x05 \x02(\x04\"\x1a\n\x18RestoreFromBackupRequest\"?\n\x16RestoreFromBackupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x17\n\x15\x43ompactAllDataRequest\"<\n\x13\x43ompactAllDataReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\":\n\x0f\x45xportDBRequest\x12\x12\n\nnewDataDir\x18\x01 \x02(\t\x12\x13\n\x0boptionsFile\x18\x02 \x01(\t\"6\n\rExportDBReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t2\xe4\x0e\n\x08\x46ossilDB\x12j\n\x06Health\x12/.com.scalableminds.fossildb.proto.HealthRequest\x1a-.com.scalableminds.fossildb.proto.HealthReply\"\x00\x12\x61\n\x03Get\x12,.com.scalableminds.fossildb.proto.GetRequest\x1a*.com.scalableminds.fossildb.proto.GetReply\"\x00\x12\x91\x01\n\x13GetMultipleVersions\x12<.com.scalableminds.fossildb.proto.GetMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.GetMultipleVersionsReply\"\x00\x12\x85\x01\n\x0fGetMultipleKeys\x12\x38.com.scalableminds.fossildb.proto.GetMultipleKeysRequest\x1a\x36.com.scalableminds.fossildb.proto.GetMultipleKeysReply\"\x00\x12\x61\n\x03Put\x12,.com.scalableminds.fossildb.proto.PutRequest\x1a*.com.scalableminds.fossildb.proto.PutReply\"\x00\x12\x91\x01\n\x13PutMultipleVersions\x12<.com.scalableminds.fossildb.proto.PutMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.PutMultipleVersionsReply\"\x00\x12j\n\x06\x44\x65lete\x12/.com.scalableminds.fossildb.proto.DeleteRequest\x1a-.com.scalableminds.fossildb.proto.DeleteReply\"\x00\x12\x9a\x01\n\x16\x44\x65leteMultipleVersions\x12?.com.scalableminds.fossildb.proto.DeleteMultipleVersionsRequest\x1a=.com.scalableminds.fossildb.proto.DeleteMultipleVersionsReply\"\x00\x12\x8b\x01\n\x11\x44\x65leteAllByPrefix\x12:.com.scalableminds.fossildb.proto.DeleteAllByPrefixRequest\x1a\x38.com.scalableminds.fossildb.proto.DeleteAllByPrefixReply\"\x00\x12p\n\x08ListKeys\x12\x31.com.scalableminds.fossildb.proto.ListKeysRequest\x1a/.com.scalableminds.fossildb.proto.ListKeysReply\"\x00\x12|\n\x0cListVersions\x12\x35.com.scalableminds.fossildb.proto.ListVersionsRequest\x1a\x33.com.scalableminds.fossildb.proto.ListVersionsReply\"\x00\x12j\n\x06\x42\x61\x63kup\x12/.com.scalableminds.fossildb.proto.BackupRequest\x1a-.com.scalableminds.fossildb.proto.BackupReply\"\x00\x12\x8b\x01\n\x11RestoreFromBackup\x12:.com.scalableminds.fossildb.proto.RestoreFromBackupRequest\x1a\x38.com.scalableminds.fossildb.proto.RestoreFromBackupReply\"\x00\x12\x82\x01\n\x0e\x43ompactAllData\x12\x37.com.scalableminds.fossildb.proto.CompactAllDataRequest\x1a\x35.com.scalableminds.fossildb.proto.CompactAllDataReply\"\x00\x12p\n\x08\x45xportDB\x12\x31.com.scalableminds.fossildb.proto.Ex
portDBRequest\x1a/.com.scalableminds.fossildb.proto.ExportDBReply\"\x00') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'fossildbapi_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _HEALTHREQUEST._serialized_start=55 + _HEALTHREQUEST._serialized_end=70 + _HEALTHREPLY._serialized_start=72 + _HEALTHREPLY._serialized_end=124 + _GETREQUEST._serialized_start=126 + _GETREQUEST._serialized_end=208 + _GETREPLY._serialized_start=210 + _GETREPLY._serialized_end=297 + _PUTREQUEST._serialized_start=299 + _PUTREQUEST._serialized_end=376 + _PUTREPLY._serialized_start=378 + _PUTREPLY._serialized_end=427 + _PUTMULTIPLEVERSIONSREQUEST._serialized_start=429 + _PUTMULTIPLEVERSIONSREQUEST._serialized_end=524 + _PUTMULTIPLEVERSIONSREPLY._serialized_start=526 + _PUTMULTIPLEVERSIONSREPLY._serialized_end=591 + _DELETEREQUEST._serialized_start=593 + _DELETEREQUEST._serialized_end=658 + _DELETEREPLY._serialized_start=660 + _DELETEREPLY._serialized_end=712 + _DELETEALLBYPREFIXREQUEST._serialized_start=714 + _DELETEALLBYPREFIXREQUEST._serialized_end=776 + _DELETEALLBYPREFIXREPLY._serialized_start=778 + _DELETEALLBYPREFIXREPLY._serialized_end=841 + _GETMULTIPLEVERSIONSREQUEST._serialized_start=843 + _GETMULTIPLEVERSIONSREQUEST._serialized_end=950 + _GETMULTIPLEVERSIONSREPLY._serialized_start=952 + _GETMULTIPLEVERSIONSREPLY._serialized_end=1051 + _GETMULTIPLEKEYSREQUEST._serialized_start=1053 + _GETMULTIPLEKEYSREQUEST._serialized_end=1168 + _GETMULTIPLEKEYSREPLY._serialized_start=1170 + _GETMULTIPLEKEYSREPLY._serialized_end=1285 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_start=1287 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_end=1397 + _DELETEMULTIPLEVERSIONSREPLY._serialized_start=1399 + _DELETEMULTIPLEVERSIONSREPLY._serialized_end=1467 + _LISTKEYSREQUEST._serialized_start=1469 + _LISTKEYSREQUEST._serialized_end=1544 + _LISTKEYSREPLY._serialized_start=1546 + _LISTKEYSREPLY._serialized_end=1614 + _LISTVERSIONSREQUEST._serialized_start=1616 + _LISTVERSIONSREQUEST._serialized_end=1701 + _LISTVERSIONSREPLY._serialized_start=1703 + _LISTVERSIONSREPLY._serialized_end=1779 + _BACKUPREQUEST._serialized_start=1781 + _BACKUPREQUEST._serialized_end=1796 + _BACKUPREPLY._serialized_start=1798 + _BACKUPREPLY._serialized_end=1895 + _RESTOREFROMBACKUPREQUEST._serialized_start=1897 + _RESTOREFROMBACKUPREQUEST._serialized_end=1923 + _RESTOREFROMBACKUPREPLY._serialized_start=1925 + _RESTOREFROMBACKUPREPLY._serialized_end=1988 + _COMPACTALLDATAREQUEST._serialized_start=1990 + _COMPACTALLDATAREQUEST._serialized_end=2013 + _COMPACTALLDATAREPLY._serialized_start=2015 + _COMPACTALLDATAREPLY._serialized_end=2075 + _EXPORTDBREQUEST._serialized_start=2077 + _EXPORTDBREQUEST._serialized_end=2135 + _EXPORTDBREPLY._serialized_start=2137 + _EXPORTDBREPLY._serialized_end=2191 + _FOSSILDB._serialized_start=2194 + _FOSSILDB._serialized_end=4086 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py new file mode 100644 index 00000000000..afb3aac4a4a --- /dev/null +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py @@ -0,0 +1,528 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import fossildbapi_pb2 as fossildbapi__pb2 + + +class FossilDBStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Health = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Health', + request_serializer=fossildbapi__pb2.HealthRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.HealthReply.FromString, + ) + self.Get = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Get', + request_serializer=fossildbapi__pb2.GetRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetReply.FromString, + ) + self.GetMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleVersions', + request_serializer=fossildbapi__pb2.GetMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetMultipleVersionsReply.FromString, + ) + self.GetMultipleKeys = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleKeys', + request_serializer=fossildbapi__pb2.GetMultipleKeysRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetMultipleKeysReply.FromString, + ) + self.Put = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Put', + request_serializer=fossildbapi__pb2.PutRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.PutReply.FromString, + ) + self.PutMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/PutMultipleVersions', + request_serializer=fossildbapi__pb2.PutMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.PutMultipleVersionsReply.FromString, + ) + self.Delete = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Delete', + request_serializer=fossildbapi__pb2.DeleteRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.DeleteReply.FromString, + ) + self.DeleteMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/DeleteMultipleVersions', + request_serializer=fossildbapi__pb2.DeleteMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.DeleteMultipleVersionsReply.FromString, + ) + self.DeleteAllByPrefix = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/DeleteAllByPrefix', + request_serializer=fossildbapi__pb2.DeleteAllByPrefixRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.DeleteAllByPrefixReply.FromString, + ) + self.ListKeys = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/ListKeys', + request_serializer=fossildbapi__pb2.ListKeysRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.ListKeysReply.FromString, + ) + self.ListVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/ListVersions', + request_serializer=fossildbapi__pb2.ListVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.ListVersionsReply.FromString, + ) + self.Backup = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Backup', + request_serializer=fossildbapi__pb2.BackupRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.BackupReply.FromString, + ) + self.RestoreFromBackup = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/RestoreFromBackup', + 
request_serializer=fossildbapi__pb2.RestoreFromBackupRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.RestoreFromBackupReply.FromString, + ) + self.CompactAllData = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/CompactAllData', + request_serializer=fossildbapi__pb2.CompactAllDataRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.CompactAllDataReply.FromString, + ) + self.ExportDB = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/ExportDB', + request_serializer=fossildbapi__pb2.ExportDBRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.ExportDBReply.FromString, + ) + + +class FossilDBServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Health(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Get(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetMultipleKeys(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Put(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PutMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Delete(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteAllByPrefix(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListKeys(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Backup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RestoreFromBackup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CompactAllData(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ExportDB(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_FossilDBServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Health': grpc.unary_unary_rpc_method_handler( + servicer.Health, + request_deserializer=fossildbapi__pb2.HealthRequest.FromString, + response_serializer=fossildbapi__pb2.HealthReply.SerializeToString, + ), + 'Get': grpc.unary_unary_rpc_method_handler( + servicer.Get, + request_deserializer=fossildbapi__pb2.GetRequest.FromString, + response_serializer=fossildbapi__pb2.GetReply.SerializeToString, + ), + 'GetMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.GetMultipleVersions, + request_deserializer=fossildbapi__pb2.GetMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.GetMultipleVersionsReply.SerializeToString, + ), + 'GetMultipleKeys': grpc.unary_unary_rpc_method_handler( + servicer.GetMultipleKeys, + request_deserializer=fossildbapi__pb2.GetMultipleKeysRequest.FromString, + response_serializer=fossildbapi__pb2.GetMultipleKeysReply.SerializeToString, + ), + 'Put': grpc.unary_unary_rpc_method_handler( + servicer.Put, + request_deserializer=fossildbapi__pb2.PutRequest.FromString, + response_serializer=fossildbapi__pb2.PutReply.SerializeToString, + ), + 'PutMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.PutMultipleVersions, + request_deserializer=fossildbapi__pb2.PutMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.PutMultipleVersionsReply.SerializeToString, + ), + 'Delete': grpc.unary_unary_rpc_method_handler( + servicer.Delete, + request_deserializer=fossildbapi__pb2.DeleteRequest.FromString, + response_serializer=fossildbapi__pb2.DeleteReply.SerializeToString, + ), + 'DeleteMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.DeleteMultipleVersions, + request_deserializer=fossildbapi__pb2.DeleteMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.DeleteMultipleVersionsReply.SerializeToString, + ), + 'DeleteAllByPrefix': grpc.unary_unary_rpc_method_handler( + servicer.DeleteAllByPrefix, + request_deserializer=fossildbapi__pb2.DeleteAllByPrefixRequest.FromString, + response_serializer=fossildbapi__pb2.DeleteAllByPrefixReply.SerializeToString, + ), + 'ListKeys': grpc.unary_unary_rpc_method_handler( + servicer.ListKeys, + request_deserializer=fossildbapi__pb2.ListKeysRequest.FromString, + 
response_serializer=fossildbapi__pb2.ListKeysReply.SerializeToString, + ), + 'ListVersions': grpc.unary_unary_rpc_method_handler( + servicer.ListVersions, + request_deserializer=fossildbapi__pb2.ListVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.ListVersionsReply.SerializeToString, + ), + 'Backup': grpc.unary_unary_rpc_method_handler( + servicer.Backup, + request_deserializer=fossildbapi__pb2.BackupRequest.FromString, + response_serializer=fossildbapi__pb2.BackupReply.SerializeToString, + ), + 'RestoreFromBackup': grpc.unary_unary_rpc_method_handler( + servicer.RestoreFromBackup, + request_deserializer=fossildbapi__pb2.RestoreFromBackupRequest.FromString, + response_serializer=fossildbapi__pb2.RestoreFromBackupReply.SerializeToString, + ), + 'CompactAllData': grpc.unary_unary_rpc_method_handler( + servicer.CompactAllData, + request_deserializer=fossildbapi__pb2.CompactAllDataRequest.FromString, + response_serializer=fossildbapi__pb2.CompactAllDataReply.SerializeToString, + ), + 'ExportDB': grpc.unary_unary_rpc_method_handler( + servicer.ExportDB, + request_deserializer=fossildbapi__pb2.ExportDBRequest.FromString, + response_serializer=fossildbapi__pb2.ExportDBReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'com.scalableminds.fossildb.proto.FossilDB', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class FossilDB(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Health(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Health', + fossildbapi__pb2.HealthRequest.SerializeToString, + fossildbapi__pb2.HealthReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Get(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Get', + fossildbapi__pb2.GetRequest.SerializeToString, + fossildbapi__pb2.GetReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleVersions', + fossildbapi__pb2.GetMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.GetMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetMultipleKeys(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleKeys', + 
fossildbapi__pb2.GetMultipleKeysRequest.SerializeToString, + fossildbapi__pb2.GetMultipleKeysReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Put(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Put', + fossildbapi__pb2.PutRequest.SerializeToString, + fossildbapi__pb2.PutReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def PutMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/PutMultipleVersions', + fossildbapi__pb2.PutMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.PutMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Delete(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Delete', + fossildbapi__pb2.DeleteRequest.SerializeToString, + fossildbapi__pb2.DeleteReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/DeleteMultipleVersions', + fossildbapi__pb2.DeleteMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.DeleteMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteAllByPrefix(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/DeleteAllByPrefix', + fossildbapi__pb2.DeleteAllByPrefixRequest.SerializeToString, + fossildbapi__pb2.DeleteAllByPrefixReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListKeys(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/ListKeys', + fossildbapi__pb2.ListKeysRequest.SerializeToString, + fossildbapi__pb2.ListKeysReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + 
def ListVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/ListVersions', + fossildbapi__pb2.ListVersionsRequest.SerializeToString, + fossildbapi__pb2.ListVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Backup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Backup', + fossildbapi__pb2.BackupRequest.SerializeToString, + fossildbapi__pb2.BackupReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def RestoreFromBackup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/RestoreFromBackup', + fossildbapi__pb2.RestoreFromBackupRequest.SerializeToString, + fossildbapi__pb2.RestoreFromBackupReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def CompactAllData(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/CompactAllData', + fossildbapi__pb2.CompactAllDataRequest.SerializeToString, + fossildbapi__pb2.CompactAllDataReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ExportDB(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/ExportDB', + fossildbapi__pb2.ExportDBRequest.SerializeToString, + fossildbapi__pb2.ExportDBReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/tools/migration-unified-annotation-versioning/geometry_pb2.py b/tools/migration-unified-annotation-versioning/geometry_pb2.py new file mode 100644 index 00000000000..f0858e0a7a2 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/geometry_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: geometry.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0egeometry.proto\x12&com.scalableminds.webknossos.datastore\"/\n\x0cVec3IntProto\x12\t\n\x01x\x18\x01 \x02(\x05\x12\t\n\x01y\x18\x02 \x02(\x05\x12\t\n\x01z\x18\x03 \x02(\x05\"$\n\x0cVec2IntProto\x12\t\n\x01x\x18\x01 \x02(\x05\x12\t\n\x01y\x18\x02 \x02(\x05\"Z\n\x12ListOfVec3IntProto\x12\x44\n\x06values\x18\x01 \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\"2\n\x0fVec3DoubleProto\x12\t\n\x01x\x18\x01 \x02(\x01\x12\t\n\x01y\x18\x02 \x02(\x01\x12\t\n\x01z\x18\x03 \x02(\x01\"8\n\nColorProto\x12\t\n\x01r\x18\x01 \x02(\x01\x12\t\n\x01g\x18\x02 \x02(\x01\x12\t\n\x01\x62\x18\x03 \x02(\x01\x12\t\n\x01\x61\x18\x04 \x02(\x01\"\x87\x01\n\x10\x42oundingBoxProto\x12\x45\n\x07topLeft\x18\x01 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12\r\n\x05width\x18\x02 \x02(\x05\x12\x0e\n\x06height\x18\x03 \x02(\x05\x12\r\n\x05\x64\x65pth\x18\x04 \x02(\x05\"\xd6\x01\n\x15NamedBoundingBoxProto\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x11\n\tisVisible\x18\x03 \x01(\x08\x12\x41\n\x05\x63olor\x18\x04 \x01(\x0b\x32\x32.com.scalableminds.webknossos.datastore.ColorProto\x12M\n\x0b\x62oundingBox\x18\x05 \x02(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\"8\n\x19\x41\x64\x64itionalCoordinateProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05value\x18\x02 \x02(\x05\"x\n\x13\x41\x64\x64itionalAxisProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05index\x18\x02 \x02(\x05\x12\x44\n\x06\x62ounds\x18\x03 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec2IntProto') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'geometry_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _VEC3INTPROTO._serialized_start=58 + _VEC3INTPROTO._serialized_end=105 + _VEC2INTPROTO._serialized_start=107 + _VEC2INTPROTO._serialized_end=143 + _LISTOFVEC3INTPROTO._serialized_start=145 + _LISTOFVEC3INTPROTO._serialized_end=235 + _VEC3DOUBLEPROTO._serialized_start=237 + _VEC3DOUBLEPROTO._serialized_end=287 + _COLORPROTO._serialized_start=289 + _COLORPROTO._serialized_end=345 + _BOUNDINGBOXPROTO._serialized_start=348 + _BOUNDINGBOXPROTO._serialized_end=483 + _NAMEDBOUNDINGBOXPROTO._serialized_start=486 + _NAMEDBOUNDINGBOXPROTO._serialized_end=700 + _ADDITIONALCOORDINATEPROTO._serialized_start=702 + _ADDITIONALCOORDINATEPROTO._serialized_end=758 + _ADDITIONALAXISPROTO._serialized_start=760 + _ADDITIONALAXISPROTO._serialized_end=880 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py new file mode 100755 index 00000000000..5d899ce6b23 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/main.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 + +import logging +import argparse + +from migration import Migration +from utils import setup_logging + +logger = logging.getLogger(__name__) + + +def main(): + setup_logging() + logger.info("Hello from Unified Annotation Versioning 
Migration!") + parser = argparse.ArgumentParser() + parser.add_argument("--src", type=str, help="Source fossildb host and port. Example: localhost:7155", required=True) + parser.add_argument("--dst", type=str, help="Destination fossildb host and port", required=False) + parser.add_argument("--dry", help="Only read and process data, do not write out results", action="store_true") + parser.add_argument("--num_threads", help="Number of threads to migrate the annotations in parallel", type=int, default=1) + parser.add_argument("--postgres", help="Postgres connection specifier, default is postgresql://postgres@localhost:5432/webknossos", type=str, default="postgresql://postgres@localhost:5432/webknossos") + parser.add_argument("--previous_start", help="Previous run start time. Only annotations last modified after that time will be migrated. Use for second run in incremental migration. Example: 2024-11-27 10:37:30.171083", type=str) + parser.add_argument("--start", help="Run “start time”. Only annotations last modified before that time will be migrated. Defaults to now. Change if FossilDB content is not up to date with postgres. Example: 2024-11-27 10:37:30.171083", type=str) + parser.add_argument("--count_versions", help="Instead of migrating, only count materialized versions of the annotation", action="store_true") + parser.add_argument("--previous_checkpoints", help="Supply checkpoints file of a previous run to resume", type=str) + parser.add_argument("--verbose", "-v", help="Print for every annotation", action="store_true") + args = parser.parse_args() + if args.dst is None and not args.dry: + parser.error("At least one of --dry or --dst is required") + migration = Migration(args) + migration.run() + + +if __name__ == '__main__': + main() diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py new file mode 100644 index 00000000000..24d5c2c6f82 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -0,0 +1,588 @@ +import psycopg2 +import psycopg2.extras +from psycopg2.extras import RealDictRow +import math +import logging +import datetime +from pathlib import Path +import time +from typing import Dict, Tuple, List, Optional, Callable, Set +from rich.progress import track +import msgspec +import concurrent.futures +import threading +from functools import partial +import heapq +import sys + +import fossildbapi_pb2 as proto +import VolumeTracing_pb2 as Volume +import SkeletonTracing_pb2 as Skeleton +import Annotation_pb2 as AnnotationProto +from utils import log_since, batch_range, format_duration, time_str +from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success + +logger = logging.getLogger("migration-logs") +checkpoint_logger = logging.getLogger("migration-checkpoints") + + +LayerVersionMapping = Dict[str, Dict[int, int]] # tracing id OR old mapping id to (old version to new version) +MappingIdMap = Dict[str, str] # tracing id to editable mapping id + + +class Migration: + + def __init__(self, args): + logger.info(f"Initializing migration with args {args} ...") + self.args = args + self.src_stub = connect_to_fossildb(args.src, "source") + self.dst_stub = None + if not args.dry: + self.dst_stub = connect_to_fossildb(args.dst, "destination") + self.done_count = None + self.done_count_lock = threading.Lock() + self.failure_count = 0 + self.failure_count_lock = threading.Lock() + self.total_count = None + self.before = 0 + + def run(self): + self.before = 
time.time() + annotations = self.read_annotation_list() + self.setup_checkpoint_logging() + self.done_count = 0 + self.failure_count = 0 + self.total_count = len(annotations) + + with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: + executor.map(self.migrate_annotation, annotations) + log_since(self.before, f"Migrating all the {self.total_count} things") + if self.failure_count > 0: + logger.info(f"There were failures for {self.failure_count} annotations. See logs for details.") + sys.exit(1) + + def migrate_annotation(self, annotation): + before = time.time() + try: + if self.args.count_versions: + versions = 0 + for tracing_id, layer_type in annotation["layers"].items(): + update_collection = self.update_collection_for_layer_type(layer_type) + newest_version = self.get_newest_version(tracing_id, update_collection) + versions += newest_version + if versions > 1: + logger.info(f"{versions} versions for {annotation['_id']}{self.get_progress()}") + else: + if self.args.verbose: + logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") + mapping_id_map = self.build_mapping_id_map(annotation) + if self.includes_revert(annotation) and self.args.previous_start is not None: + self.clean_up_previously_migrated(annotation, mapping_id_map) + layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) + materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) + if len(materialized_versions) == 0: + raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") + self.create_and_save_annotation_proto(annotation, materialized_versions, mapping_id_map) + if time.time() - before > 1 or self.args.verbose: + log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) + checkpoint_logger.info(annotation['_id']) + except Exception: + logger.exception(f"Exception while migrating annotation {annotation['_id']}:") + with self.failure_count_lock: + self.failure_count += 1 + finally: + with self.done_count_lock: + self.done_count += 1 + + def build_mapping_id_map(self, annotation) -> MappingIdMap: + mapping_id_map = {} + for tracing_id, layer_type in annotation["layers"].items(): + if layer_type == "Volume": + editable_mapping_id = self.get_editable_mapping_id(tracing_id, layer_type) + if editable_mapping_id is not None: + mapping_id_map[tracing_id] = editable_mapping_id + return mapping_id_map + + def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> Tuple[List[Tuple[int, int, bytes]], bool]: + batch_size = 1000 + newest_version = self.get_newest_version(tracing_or_mapping_id, collection) + updates_for_layer = [] + included_revert = False + next_version = newest_version + for batch_start, batch_end in reversed(list(batch_range(newest_version, batch_size))): + if batch_start > next_version: + continue + update_groups = self.get_update_batch(tracing_or_mapping_id, collection, batch_start, batch_end) + for version, update_group in reversed(update_groups): + if version > next_version: + continue + update_group, timestamp, revert_source_version = self.process_update_group(tracing_or_mapping_id, layer_type, update_group, json_encoder, json_decoder) + if revert_source_version is not None: + next_version = revert_source_version + included_revert = True + else: + next_version -= 1 + if revert_source_version 
is None: # skip the revert itself too, since we’re ironing them out + updates_for_layer.append((timestamp, version, update_group)) + updates_for_layer.reverse() + return updates_for_layer, included_revert + + def includes_revert(self, annotation) -> bool: + json_encoder = msgspec.json.Encoder() + json_decoder = msgspec.json.Decoder() + layers = list(annotation["layers"].items()) + for tracing_id, layer_type in layers: + collection = self.update_collection_for_layer_type(layer_type) + _, layer_included_revert = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + if layer_included_revert: + return True + return False + + def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: + all_update_groups = [] + json_encoder = msgspec.json.Encoder() + json_decoder = msgspec.json.Decoder() + layers = list(annotation["layers"].items()) + tracing_ids_and_mapping_ids = [] + for tracing_id, layer_type in layers: + collection = self.update_collection_for_layer_type(layer_type) + layer_updates, _ = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(layer_updates) + tracing_ids_and_mapping_ids.append(tracing_id) + if tracing_id in mapping_id_map: + mapping_id = mapping_id_map[tracing_id] + layer_updates, _ = self.fetch_updates(mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(layer_updates) + tracing_ids_and_mapping_ids.append(mapping_id) + + unified_version = 0 + version_mapping = {} + for tracing_or_mapping_id in tracing_ids_and_mapping_ids: + version_mapping[tracing_or_mapping_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. + + # We use a priority queue to efficiently select which tracing each next update should come from. 
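+ # Each queue entry is ((timestamp, version, update_group), layer_index, element_index) for one layer's update stream.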
+ # This effectively implements a merge sort + queue = [] + for i, update_groups_for_layer in enumerate(all_update_groups): + if update_groups_for_layer: + # The priority queue sorts tuples lexicographically, so timestamp is the main sorting key here + heapq.heappush(queue, (update_groups_for_layer[0], i, 0)) + while queue: + value, layer_index, element_index = heapq.heappop(queue) + timestamp, version, update_group = value + tracing_or_mapping_id = tracing_ids_and_mapping_ids[layer_index] + + unified_version += 1 + version_mapping[tracing_or_mapping_id][version] = unified_version + self.save_update_group(annotation['_id'], unified_version, update_group) + + if element_index + 1 < len(all_update_groups[layer_index]): + next_element = all_update_groups[layer_index][element_index + 1] + heapq.heappush(queue, (next_element, layer_index, element_index + 1)) + + return version_mapping + + def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: + if layer_type == "Skeleton": + return None + tracing_raw = self.get_newest_tracing_raw(tracing_id, "volumes") + if tracing_raw is None: + return None + volume = Volume.VolumeTracing() + volume.ParseFromString(tracing_raw) + if volume.hasEditableMapping: + return volume.mappingName + return None + + def get_newest_tracing_raw(self, tracing_id, collection) -> Optional[bytes]: + getReply = self.src_stub.Get( + proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) + ) + if getReply.success: + return getReply.value + return None + + def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes, json_encoder, json_decoder) -> Tuple[bytes, int, Optional[int]]: + update_group_parsed = json_decoder.decode(update_group_raw) + + revert_source_version = None + action_timestamp = 0 + + for update in update_group_parsed: + name = update["name"] + update_value = update["value"] + + # renamings + if name == "updateTracing": + update["name"] = f"update{layer_type}Tracing" + elif name == "updateUserBoundingBoxes": + update["name"] = f"updateUserBoundingBoxesIn{layer_type}Tracing" + elif name == "updateUserBoundingBoxVisibility": + update["name"] = f"updateUserBoundingBoxVisibilityIn{layer_type}Tracing" + + name = update["name"] + + # add actionTracingId + if not name == "updateTdCamera": + update["value"]["actionTracingId"] = tracing_id + + # identify compact update actions, and mark them + if (name == "updateBucket" and "position" not in update_value) \ + or (name == "updateVolumeTracing" and "activeSegmentId" not in update_value) \ + or (name == "updateUserBoundingBoxesInVolumeTracing" and "boundingBoxes" not in update_value) \ + or (name == "updateUserBoundingBoxVisibilityInVolumeTracing" and "boundingBoxId" not in update_value) \ + or (name == "deleteSegmentData" and "id" not in update_value) \ + or (name == "createSegment" and "name" not in update_value) \ + or (name == "updateSegment" and "name" not in update_value) \ + or (name == "updateMappingName" and "mappingName" not in update_value): + update["isCompacted"] = True + + if name == "revertToVersion": + # Assumption: revertToVersion actions are the only ones in their group. 
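+ # If that assumption were ever violated, the other updates of such a group would be dropped in fetch_updates together with the revert itself.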
+ revert_source_version = update_value["sourceVersion"] + + if "actionTimestamp" in update_value: + action_timestamp = update_value["actionTimestamp"] + + return json_encoder.encode(update_group_parsed), action_timestamp, revert_source_version + + def save_update_group(self, annotation_id: str, version: int, update_group_raw: bytes) -> None: + self.save_bytes(collection="annotationUpdates", key=annotation_id, version=version, value=update_group_raw) + + def get_newest_version(self, tracing_id: str, collection: str) -> int: + getReply = self.src_stub.Get( + proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) + ) + if getReply.success: + return getReply.actualVersion + return 0 + + def get_update_batch(self, tracing_or_mapping_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, bytes]]: + reply = self.src_stub.GetMultipleVersions( + proto.GetMultipleVersionsRequest(collection=collection, key=tracing_or_mapping_id, oldestVersion=batch_start, newestVersion=batch_end_inclusive) + ) + assert_grpc_success(reply) + reply.versions.reverse() + reply.values.reverse() + return list(zip(reply.versions, reply.values)) + + def update_collection_for_layer_type(self, layer_type): + if layer_type == "Skeleton": + return "skeletonUpdates" + return "volumeUpdates" + + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> Set[int]: + materialized_versions = set() + for tracing_id, tracing_type in annotation["layers"].items(): + materialized_versions_of_layer = \ + self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping, mapping_id_map) + materialized_versions.update(materialized_versions_of_layer) + return materialized_versions + + def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: + if layer_type == "Skeleton": + return self.migrate_skeleton_proto(tracing_id, layer_version_mapping) + if layer_type == "Volume": + materialized_volume_versions = self.migrate_volume_proto(tracing_id, layer_version_mapping, mapping_id_map) + self.migrate_volume_buckets(tracing_id, layer_version_mapping) + self.migrate_segment_index(tracing_id, layer_version_mapping) + materialized_mapping_versions = self.migrate_editable_mapping(tracing_id, layer_version_mapping, mapping_id_map) + return materialized_volume_versions + materialized_mapping_versions + + def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping) -> List[int]: + collection = "skeletons" + materialized_versions_unified = [] + materialized_versions = self.list_versions(collection, tracing_id) + for materialized_version in materialized_versions: + if materialized_version not in layer_version_mapping[tracing_id]: + continue + new_version = layer_version_mapping[tracing_id][materialized_version] + value_bytes = self.get_bytes(collection, tracing_id, materialized_version) + if materialized_version != new_version: + skeleton = Skeleton.SkeletonTracing() + skeleton.ParseFromString(value_bytes) + skeleton.version = new_version + value_bytes = skeleton.SerializeToString() + materialized_versions_unified.append(new_version) + self.save_bytes(collection, tracing_id, new_version, value_bytes) + return materialized_versions_unified + + def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): +
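# Rewrites each materialized volume proto to its unified version number; for layers with an editable mapping, mappingName is repointed at the tracing id, under which the migrated mapping is stored (see migrate_editable_mapping_info below). +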
collection = "volumes" + materialized_versions_unified = [] + materialized_versions = self.list_versions(collection, tracing_id) + for materialized_version in materialized_versions: + if materialized_version not in layer_version_mapping[tracing_id]: + continue + new_version = layer_version_mapping[tracing_id][materialized_version] + value_bytes = self.get_bytes(collection, tracing_id, materialized_version) + if materialized_version != new_version or tracing_id in mapping_id_map: + volume = Volume.VolumeTracing() + volume.ParseFromString(value_bytes) + volume.version = new_version + if tracing_id in mapping_id_map: + volume.mappingName = tracing_id + value_bytes = volume.SerializeToString() + materialized_versions_unified.append(new_version) + self.save_bytes(collection, tracing_id, new_version, value_bytes) + return materialized_versions_unified + + def list_versions(self, collection, key) -> List[int]: + reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) + assert_grpc_success(reply) + return reply.versions + + def get_bytes(self, collection: str, key: str, version: int) -> bytes: + reply = self.src_stub.Get(proto.GetRequest(collection=collection, key=key, version=version)) + assert_grpc_success(reply) + return reply.value + + def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> None: + if self.dst_stub is not None: + reply = self.dst_stub.Put(proto.PutRequest(collection=collection, key=key, version=version, value=value)) + assert_grpc_success(reply) + + def save_multiple_versions(self, collection: str, key: str, versions: List[int], values: List[bytes]) -> None: + if self.dst_stub is not None: + reply = self.dst_stub.PutMultipleVersions(proto.PutMultipleVersionsRequest(collection=collection, key=key, versions=versions, values=values)) + assert_grpc_success(reply) + + def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_all_versions_and_keys_with_prefix("volumeData", tracing_id, layer_version_mapping, transform_key=self.remove_morton_index) + + def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): + list_keys_page_size = 5000 + versions_page_size = 500 + current_start_after_key = tracing_or_mapping_id + "." # . 
is lexicographically before / + newest_tracing_version = max(layer_version_mapping[tracing_or_mapping_id].keys()) + while True: + list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) + assert_grpc_success(list_keys_reply) + if len(list_keys_reply.keys) == 0: + # We iterated towards the very end of the collection + return + for key in list_keys_reply.keys: + if key.startswith(tracing_or_mapping_id): + for version_range_start, version_range_end in batch_range(newest_tracing_version, versions_page_size): + get_versions_reply = self.src_stub.GetMultipleVersions(proto.GetMultipleVersionsRequest(collection=collection, key=key, oldestVersion=version_range_start, newestVersion=version_range_end)) + assert_grpc_success(get_versions_reply) + new_key = key + if transform_key is not None: + new_key = transform_key(key) + versions_to_save = [] + values_to_save = [] + for version, value in zip(get_versions_reply.versions, get_versions_reply.values): + if version not in layer_version_mapping[tracing_or_mapping_id]: + continue + new_version = layer_version_mapping[tracing_or_mapping_id][version] + versions_to_save.append(new_version) + values_to_save.append(value) + self.save_multiple_versions(collection, new_key, versions_to_save, values_to_save) + current_start_after_key = key + else: + # We iterated past the elements of the current tracing + return + + def migrate_segment_index(self, tracing_id, layer_version_mapping): + self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) + + def migrate_editable_mapping(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: + if tracing_id not in mapping_id_map: + return [] + mapping_id = mapping_id_map[tracing_id] + materialized_versions = self.migrate_editable_mapping_info(tracing_id, mapping_id, layer_version_mapping) + self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, mapping_id, layer_version_mapping) + self.migrate_editable_mapping_segment_to_agglomerate(tracing_id, mapping_id, layer_version_mapping) + return materialized_versions + + def migrate_editable_mapping_info(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping) -> List[int]: + collection = "editableMappingsInfo" + materialized_versions = self.list_versions(collection, mapping_id) + materialized_versions_unified = [] + for materialized_version in materialized_versions: + value_bytes = self.get_bytes(collection, mapping_id, materialized_version) + if materialized_version not in layer_version_mapping[mapping_id]: + continue + new_version = layer_version_mapping[mapping_id][materialized_version] + materialized_versions_unified.append(new_version) + self.save_bytes(collection, tracing_id, new_version, value_bytes) + return materialized_versions_unified + + def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_all_versions_and_keys_with_prefix( + "editableMappingsAgglomerateToGraph", + mapping_id, + layer_version_mapping, + transform_key=partial(self.replace_before_first_slash, tracing_id) + ) + + def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_all_versions_and_keys_with_prefix( + "editableMappingsSegmentToAgglomerate", + mapping_id, +
layer_version_mapping, + transform_key=partial(self.replace_before_first_slash, tracing_id) + ) + + def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int], mapping_id_map: MappingIdMap): + skeleton_may_have_pending_updates = self.skeleton_may_have_pending_updates(annotation) + editable_mapping_may_have_pending_updates = bool(mapping_id_map) # same problem as with skeletons, see comment there + earliest_accessible_version = 0 + if len(mapping_id_map) > 0: + # An editable mapping exists in this annotation. + # Merged editable mappings have updates in non-chronological order, + # so accessing their merged update history will lead to unexpected behavior. + # So we forbid it. + earliest_accessible_version = max(materialized_versions) + # We write an annotationProto object for every materialized version of every layer. + for version in materialized_versions: + annotationProto = AnnotationProto.AnnotationProto() + annotationProto.description = annotation["description"] or "" + annotationProto.version = version + annotationProto.earliestAccessibleVersion = earliest_accessible_version + if skeleton_may_have_pending_updates: + annotationProto.skeletonMayHavePendingUpdates = True + if editable_mapping_may_have_pending_updates: + annotationProto.editableMappingsMayHavePendingUpdates = True + for tracing_id, tracing_type in annotation["layers"].items(): + layer_proto = AnnotationProto.AnnotationLayerProto() + layer_proto.tracingId = tracing_id + layer_proto.name = annotation["layernames"][tracing_id] + layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Skeleton + if tracing_type == "Volume": + layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Volume + layer_proto.typ = layer_type_proto + annotationProto.annotationLayers.append(layer_proto) + self.save_bytes(collection="annotations", key=annotation["_id"], version=version, value=annotationProto.SerializeToString()) + + def skeleton_may_have_pending_updates(self, annotation) -> bool: + # Skeletons in the old code had their updates applied lazily. 
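+ # (update actions were stored in the update log first and only applied to the materialized skeleton when it was next loaded)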
+ # Thus, the current materialized skeleton may not be up to date. + # But since we are writing a materialized annotationProto for every materialized version of every layer, + # the skeleton must be marked as skeletonMayHavePendingUpdates. + # We do this always, except if there is no skeleton, + # or if it is the only layer (then the materialized set matches). + if len(annotation["layers"]) < 2: + return False + return "Skeleton" in annotation["layers"].values() + + def clean_up_previously_migrated(self, annotation, mapping_id_map: MappingIdMap) -> None: + before = time.time() + logger.info(f"Cleaning up previously migrated annotation {annotation['_id']}...") + self.delete_all_versions("annotations", annotation["_id"]) + self.delete_all_versions("annotationUpdates", annotation["_id"]) + for tracing_id, layer_type in annotation["layers"].items(): + if layer_type == "Skeleton": + self.delete_all_versions("skeletons", tracing_id) + elif layer_type == "Volume": + self.delete_all_versions("volumes", tracing_id) + self.delete_all_with_prefix("volumeData", tracing_id) + self.delete_all_with_prefix("volumeSegmentIndex", tracing_id) + for mapping_id in mapping_id_map.values(): + self.delete_all_versions("editableMappingsInfo", mapping_id) + self.delete_all_with_prefix("editableMappingsAgglomerateToGraph", mapping_id) + self.delete_all_with_prefix("editableMappingsSegmentToAgglomerate", mapping_id) + log_since(before, f"Cleaning up previously migrated annotation {annotation['_id']}") + + def delete_all_versions(self, collection: str, id: str) -> None: + reply = self.dst_stub.DeleteMultipleVersions(proto.DeleteMultipleVersionsRequest(collection=collection, key=id)) + assert_grpc_success(reply) + + def delete_all_with_prefix(self, collection: str, prefix: str) -> None: + reply = self.dst_stub.DeleteAllByPrefix(proto.DeleteAllByPrefixRequest(collection=collection, prefix=prefix)) + assert_grpc_success(reply) + + def read_annotation_list(self): + checkpoint_set = self.read_checkpoints() + before = time.time() + start_time = str(datetime.datetime.now()) + if self.args.start is not None: + start_time = self.args.start + previous_start_label = "" + previous_start_query = "" + if self.args.previous_start is not None: + previous_start_label = f" and after previous start time {self.args.previous_start}" + previous_start_query = f" AND modified > '{self.args.previous_start}'" + logger.info(f"Looking only for annotations last modified before start time {start_time}{previous_start_label}.") + logger.info("Determining annotation count from postgres...") + page_size = 10000 + connection = connect_to_postgres(self.args.postgres) + cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + + cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < '{start_time}'{previous_start_query}") + annotation_count = cursor.fetchone()['count'] + logger.info(f"Loading infos of {annotation_count} annotations from postgres ...") + annotations = [] + page_count = math.ceil(annotation_count / page_size) + for page_num in track(range(page_count), total=page_count, description="Loading annotation infos ..."): + query = f""" + WITH annotations AS ( + SELECT _id, name, description, created, modified FROM webknossos.annotations + WHERE modified < '{start_time}' + {previous_start_query} + ORDER BY MD5(_id) + LIMIT {page_size} + OFFSET {page_size * page_num} + ) + + SELECT + a._id, a.name, a.description, a.created, a.modified, + JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers, + 
JSON_OBJECT_AGG(al.tracingId, al.name) AS layerNames + FROM webknossos.annotation_layers al + JOIN annotations a on al._annotation = a._id + GROUP BY a._id, a.name, a.description, a.created, a.modified + """ + cursor.execute(query) + rows = cursor.fetchall() + for row in rows: + if len(checkpoint_set) == 0 or row["_id"] not in checkpoint_set: + annotations.append(row) + if annotation_count != len(annotations): + logger.info(f"Using {len(annotations)} of the full {annotation_count} annotations (after filtering out zero-layer and already-checkpointed annotations).") + log_since(before, "Loading annotation infos from postgres") + return annotations + + def remove_morton_index(self, bucket_key: str) -> str: + first_slash_index = bucket_key.index('/') + second_slash_index = bucket_key.index('/', first_slash_index + 1) + first_bracket_index = bucket_key.index('[') + return bucket_key[:second_slash_index + 1] + bucket_key[first_bracket_index:] + + def replace_before_first_slash(self, replacement_prefix: str, key: str) -> str: + slash_pos = key.find('/') + return replacement_prefix + key[slash_pos:] + + def get_progress(self) -> str: + with self.done_count_lock: + done_count = self.done_count + percentage = 100.0 * done_count / self.total_count + duration = time.time() - self.before + if done_count > 0: + etr = duration / done_count * (self.total_count - done_count) + etr_formatted = f". ETR {format_duration(etr)})" + else: + etr_formatted = ")" + return f". ({done_count}/{self.total_count} = {percentage:.1f}% done{etr_formatted}" + + def read_checkpoints(self) -> Set[str]: + if self.args.previous_checkpoints is None: + return set() + with open(self.args.previous_checkpoints, 'r') as previous_checkpoints_file: + previous_checkpoints = set(line.strip() for line in previous_checkpoints_file) + logger.info(f"Using checkpoints from previous run with {len(previous_checkpoints)} entries.") + return previous_checkpoints + + def setup_checkpoint_logging(self): + # We are abusing the logging module to write the checkpoints, as it is thread-safe and provides a file handler + checkpoint_logger.setLevel(logging.INFO) + checkpoints_path = Path("checkpoints") + checkpoints_path.mkdir(exist_ok=True) + if self.args.previous_checkpoints is not None: + checkpoint_file = self.args.previous_checkpoints + logger.info(f"Appending to supplied checkpoint file at {checkpoint_file}") + else: + checkpoint_file = f"{checkpoints_path}/{time_str()}.log" + logger.info(f"Writing checkpoint file at {checkpoint_file}") + checkpoints_file_handler = logging.FileHandler(checkpoint_file) + checkpoint_logger.addHandler(checkpoints_file_handler) diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt new file mode 100644 index 00000000000..d7a16104aed --- /dev/null +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -0,0 +1,6 @@ +grpcio==1.68.0 +argparse==1.4.0 +psycopg2-binary==2.9.10 +protobuf==5.28.3 +rich==13.9.4 +msgspec==0.18.6 diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py new file mode 100644 index 00000000000..91def43fcba --- /dev/null +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -0,0 +1,81 @@ +import logging +import time +from typing import Iterator, Tuple +import sys +from math import floor, ceil +from datetime import datetime +from pathlib import Path + +logger = logging.getLogger("migration-logs") + + +def setup_logging(): + 
logger.setLevel(logging.DEBUG) + + formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(threadName)-24s %(message)s") + + stdout_handler = logging.StreamHandler(sys.stdout) + stdout_handler.setLevel(logging.DEBUG) + stdout_handler.setFormatter(formatter) + logger.addHandler(stdout_handler) + + logs_path = Path("logs") + logs_path.mkdir(exist_ok=True) + file_handler = logging.FileHandler(f"{logs_path}/{time_str()}.log") + file_handler.setLevel(logging.DEBUG) + file_handler.setFormatter(formatter) + logger.addHandler(file_handler) + + +def time_str() -> str: + return datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f") + + +def log_since(before: float, label: str, postfix: str = "") -> None: + diff = time.time() - before + logger.info(f"{label} took {format_duration(diff)}{postfix}") + + +def batch_range( + limit: int, batch_size: int +) -> Iterator[Tuple[int, int]]: + # Yields (start, end) pairs stepping through range(limit) in steps of batch_size. + for i in range(0, limit, batch_size): + yield i, min(i + batch_size, limit) + + +def format_duration(seconds: float) -> str: + def pluralize(string: str, amount: int) -> str: + return string if amount == 1 else string + "s" + + max_elements = 3 + + label_elements = [] + + days = floor(seconds / 3600 / 24) + if days > 0 and len(label_elements) < max_elements: + label_elements.append(pluralize(f"{days} day", days)) + seconds -= days * 24 * 3600 + + hours = floor(seconds / 3600) + if hours > 0 and len(label_elements) < max_elements: + label_elements.append(f"{hours}h") + seconds -= hours * 3600 + + minutes = floor(seconds / 60) + if minutes > 0 and len(label_elements) < max_elements: + label_elements.append(f"{minutes}m") + seconds -= minutes * 60 + + whole_seconds = ceil(seconds) + if seconds >= 0 and len(label_elements) < max_elements: + if len(label_elements) < 1: + label_elements.append(f"{seconds:.2f}s") + else: + label_elements.append(f"{whole_seconds}s") + + return " ".join(label_elements) diff --git a/util/src/main/scala/collections/SequenceUtils.scala b/util/src/main/scala/collections/SequenceUtils.scala index 9d839141c7d..93978cbd9ba 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ b/util/src/main/scala/collections/SequenceUtils.scala @@ -6,4 +6,48 @@ object SequenceUtils { if (uniqueElements.length == 1) uniqueElements.headOption else None } + + /* + Split a list into sublists, isolating the elements that satisfy the given predicate. + Those elements will be in single-item lists. + Example: + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i == 4) + → List(List(1, 2, 3), List(4), List(5, 6, 7)) + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i % 3 == 0) + → List(List(1, 2), List(3), List(4, 5), List(6), List(7)) + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i > 1000) # no matches → no splitting + → List(List(1, 2, 3, 4, 5, 6, 7)) + splitAndIsolate(List())(i => true) # empty list stays empty + → List() + */ + def splitAndIsolate[T](list: List[T])(predicate: T => Boolean): List[List[T]] = + list + .foldLeft(List[List[T]]()) { (acc, item) => + if (predicate(item)) { + List.empty :: List(item) :: acc + } else { + acc match { + case head :: tail => (item :: head) :: tail + case Nil => List(List(item)) + } + } + } + .reverse // we prepended on the outer list (for perf reasons) + .map(_.reverse) // we prepended on the inner lists (for perf reasons) + + /* + Create a Seq of Long range tuples, covering a given inclusive Long range. + The individual ranges should be treated as inclusive as well. 
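+ Useful e.g. for paging through long version ranges in bounded batches.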
+ Example: + batchRangeInclusive(0,5,3) + → Seq((0,2), (3,5)) + batchRangeInclusive(0,6,2) + → Seq((0,1), (2,3), (4,5), (6,6)) + */ + def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = + (0L to ((to - from) / batchSize)).map { batchIndex => + val batchFrom = batchIndex * batchSize + from + val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) + (batchFrom, batchTo) + } } diff --git a/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala new file mode 100644 index 00000000000..2a74b356bf4 --- /dev/null +++ b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala @@ -0,0 +1,4 @@ +package com.scalableminds.util.accesscontext + +// to be used in datastore and tracingstore to hand around tokens that were supplied with the request +case class TokenContext(userTokenOpt: Option[String]) diff --git a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala index 2669922ad58..e04cdad8b60 100644 --- a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala +++ b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala @@ -1,6 +1,7 @@ package com.scalableminds.util.mvc import com.google.protobuf.CodedInputStream +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common._ @@ -235,8 +236,8 @@ trait ValidationHelpers { } trait RequestTokenHelper { - protected def urlOrHeaderToken(token: Option[String], request: Request[Any]): Option[String] = - token.orElse(request.headers.get("X-Auth-Token")) + implicit def tokenContextForRequest(implicit request: Request[Any]): TokenContext = + TokenContext(request.target.getQueryParameter("token").orElse(request.headers.get("X-Auth-Token"))) } trait ExtendedController diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala index acdba7ba1c6..d06569ea50b 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala @@ -105,13 +105,14 @@ object Fox extends FoxImplicits { def sequence[T](l: List[Fox[T]])(implicit ec: ExecutionContext): Future[List[Box[T]]] = Future.sequence(l.map(_.futureBox)) - def combined[T](l: List[Fox[T]])(implicit ec: ExecutionContext): Fox[List[T]] = + def combined[T](l: Seq[Fox[T]])(implicit ec: ExecutionContext): Fox[List[T]] = Fox(Future.sequence(l.map(_.futureBox)).map { results => results.find(_.isEmpty) match { case Some(Empty) => Empty case Some(failure: Failure) => failure case _ => - Full(results.map(_.openOrThrowException("An exception should never be thrown, all boxes must be full"))) + Full( + results.map(_.openOrThrowException("An exception should never be thrown, all boxes must be full")).toList) } }) @@ -137,7 +138,7 @@ object Fox extends FoxImplicits { } // Run serially, fail on the first failure - def serialCombined[A, B](l: List[A])(f: A => Fox[B])(implicit ec: ExecutionContext): Fox[List[B]] = + def serialCombined[A, B](l: Iterable[A])(f: A => Fox[B])(implicit ec: ExecutionContext): Fox[List[B]] = serialCombined(l.iterator)(f) // Run serially, fail on the first failure diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index c031060e1be..fe88493c36b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -48,14 +48,12 @@ class BinaryDataController @Inject()( val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService def requestViaWebknossos( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { logTime(slackNotificationService.noticeSlowRequest) { val t = Instant.now for { @@ -79,7 +77,6 @@ class BinaryDataController @Inject()( * Handles requests for raw binary data via HTTP GET. */ def requestRawCuboid( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, @@ -97,9 +94,8 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -118,14 +114,12 @@ class BinaryDataController @Inject()( } def requestRawCuboidPost( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -138,8 +132,7 @@ class BinaryDataController @Inject()( /** * Handles a request for raw binary data via a HTTP GET. Used by knossos. 
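* Expects cube coordinates (x, y, z) and returns a single cube of edge length cubeSize in the requested mag.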
*/ - def requestViaKnossos(token: Option[String], - organizationId: String, + def requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, @@ -147,9 +140,8 @@ class BinaryDataController @Inject()( y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -165,8 +157,7 @@ class BinaryDataController @Inject()( } } - def thumbnailJpeg(token: Option[String], - organizationId: String, + def thumbnailJpeg(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, @@ -180,9 +171,8 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -223,15 +213,13 @@ class BinaryDataController @Inject()( } def mappingJson( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -246,14 +234,12 @@ class BinaryDataController @Inject()( /** * Handles ad-hoc mesh requests. 
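* The mesh is computed on the fly from the volume data, in contrast to the precomputed mesh files served by DSMeshController.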
*/ - def requestAdHocMesh(token: Option[String], - organizationId: String, + def requestAdHocMesh(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -289,14 +275,10 @@ class BinaryDataController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(token: Option[String], - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[AnyContent] = + def findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -306,14 +288,10 @@ class BinaryDataController @Inject()( } } - def histogram(token: Option[String], - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[AnyContent] = + def histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 36d3b25eeec..aa81aae8589 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -23,36 +23,30 @@ class DSMeshController @Inject()( override def allowRemoteOrigin: Boolean = true - def listMeshFiles(token: Option[String], - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[AnyContent] = + def listMeshFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for 
{ meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetDirectoryName, dataLayerName) } yield Ok(Json.toJson(meshFiles)) } } - def listMeshChunksForSegment(token: Option[String], - organizationId: String, + def listMeshChunksForSegment(organizationId: String, datasetDirectoryName: String, dataLayerName: String, /* If targetMappingName is set, assume that meshfile contains meshes for - the oversegmentation. Collect mesh chunks of all *unmapped* segment ids - belonging to the supplied agglomerate id. - If it is not set, use meshfile as is, assume passed id is present in meshfile - Note: in case of an editable mapping, targetMappingName is its baseMapping name. + the oversegmentation. Collect mesh chunks of all *unmapped* segment ids + belonging to the supplied agglomerate id. + If it is not set, use meshfile as is, assume passed id is present in meshfile + Note: in case of an editable mapping, targetMappingName is its baseMapping name. */ targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { _ <- Fox.successful(()) mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, @@ -67,8 +61,7 @@ class DSMeshController @Inject()( editableMappingTracingId, request.body.segmentId, mappingNameForMeshFile, - omitMissing = false, - urlOrHeaderToken(token, request) + omitMissing = false ) chunkInfos <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, datasetDirectoryName, @@ -79,14 +72,12 @@ class DSMeshController @Inject()( } } - def readMeshChunk(token: Option[String], - organizationId: String, + def readMeshChunk(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetDirectoryName, @@ -100,17 +91,14 @@ class DSMeshController @Inject()( } } - def loadFullMeshStl(token: Option[String], - organizationId: String, + def loadFullMeshStl(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], - organizationId, + data: Array[Byte] <- fullMeshService.loadFor(organizationId, datasetDirectoryName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 84a3e373622..72e587ab9f4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -56,14 +56,11 @@ class DataSourceController @Inject()( override def allowRemoteOrigin: Boolean = true - def readInboxDataSource(token: Option[String], - organizationId: String, - datasetDirectoryName: String): Action[AnyContent] = + def readInboxDataSource(organizationId: String, datasetDirectoryName: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { // Read directly from file, not from repository to ensure recent changes are seen val dataSource: InboxDataSource = dataSourceService.dataSourceFromDir( @@ -74,22 +71,22 @@ class DataSourceController @Inject()( } } - def triggerInboxCheckBlocking(token: Option[String]): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { + def triggerInboxCheckBlocking(): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { _ <- dataSourceService.checkInbox(verbose = true) } yield Ok } } - def reserveUpload(token: Option[String]): Action[ReserveUploadInformation] = + def reserveUpload(): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organization)) { for { isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { - (dsRemoteWebknossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataset.upload.validation.failed") + (dsRemoteWebknossosClient.reserveDataSourceUpload(request.body) ?~> "dataset.upload.validation.failed") .flatMap(reserveUploadAdditionalInfo => uploadService.reserveUpload(request.body, reserveUploadAdditionalInfo)) } else Fox.successful(()) @@ -97,13 +94,11 @@ class DataSourceController @Inject()( } } - def getUnfinishedUploads(token: Option[String], organizationName: String): Action[AnyContent] = + def getUnfinishedUploads(organizationName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationName)) { for { - unfinishedUploads <- dsRemoteWebknossosClient.getUnfinishedUploadsForUser(urlOrHeaderToken(token, request), - organizationName) + unfinishedUploads <- 
dsRemoteWebknossosClient.getUnfinishedUploadsForUser(organizationName) unfinishedUploadsWithUploadIds <- uploadService.addUploadIdsToUnfinishedUploads(unfinishedUploads) unfinishedUploadsWithUploadIdsWithoutDataSourceId = unfinishedUploadsWithUploadIds.map(_.withoutDataSourceId) } yield Ok(Json.toJson(unfinishedUploadsWithUploadIdsWithoutDataSourceId)) @@ -112,10 +107,10 @@ class DataSourceController @Inject()( // To be called by people with disk access but not DatasetManager role. This way, they can upload a dataset manually on disk, // and it can be put in a webknossos folder where they have access - def reserveManualUpload(token: Option[String]): Action[ReserveManualUploadInformation] = + def reserveManualUpload(): Action[ReserveManualUploadInformation] = Action.async(validateJson[ReserveManualUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organization)) { for { _ <- dsRemoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -127,8 +122,7 @@ class DataSourceController @Inject()( None, request.body.initialTeamIds, request.body.folderId - ), - urlOrHeaderToken(token, request) + ) ) ?~> "dataset.upload.validation.failed" } yield Ok } @@ -148,7 +142,7 @@ class DataSourceController @Inject()( - As GET parameter: - token (string): datastore token identifying the uploading user */ - def uploadChunk(token: Option[String]): Action[MultipartFormData[Files.TemporaryFile]] = + def uploadChunk(): Action[MultipartFormData[Files.TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => val uploadForm = Form( tuple( @@ -167,8 +161,8 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(uploadFileId)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(uploadFileId) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -185,13 +179,12 @@ class DataSourceController @Inject()( ) } - def testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String): Action[AnyContent] = + def testChunk(resumableChunkNumber: Int, resumableIdentifier: String): Action[AnyContent] = Action.async { implicit request => for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(resumableIdentifier)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(resumableIdentifier) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -201,39 +194,35 @@ class DataSourceController @Inject()( } yield result } - def finishUpload(token: Option[String]): Action[UploadInformation] = Action.async(validateJson[UploadInformation]) { - implicit request => - log() { - for { - dataSourceId <- uploadService - 
.getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" - response <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { - for { - (dataSourceId, datasetSizeBytes) <- uploadService - .finishUpload(request.body) ?~> "dataset.upload.finishFailed" - uploadedDatasetIdJson <- dsRemoteWebknossosClient.reportUpload( - dataSourceId, - datasetSizeBytes, - request.body.needsConversion.getOrElse(false), - viaAddRoute = false, - userToken = urlOrHeaderToken(token, request) - ) ?~> "reportUpload.failed" - } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetIdJson)) - } - } yield response - } + def finishUpload(): Action[UploadInformation] = Action.async(validateJson[UploadInformation]) { implicit request => + log() { + for { + dataSourceId <- uploadService + .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" + response <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { + for { + (dataSourceId, datasetSizeBytes) <- uploadService + .finishUpload(request.body) ?~> "dataset.upload.finishFailed" + uploadedDatasetIdJson <- dsRemoteWebknossosClient.reportUpload( + dataSourceId, + datasetSizeBytes, + request.body.needsConversion.getOrElse(false), + viaAddRoute = false + ) ?~> "reportUpload.failed" + } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetIdJson)) + } + } yield response + } } - def cancelUpload(token: Option[String]): Action[CancelUploadInformation] = + def cancelUpload(): Action[CancelUploadInformation] = Action.async(validateJson[CancelUploadInformation]) { implicit request => val dataSourceIdFox = uploadService.isKnownUpload(request.body.uploadId).flatMap { case false => Fox.failure("dataset.upload.validation.failed") case true => uploadService.getDataSourceIdByUploadId(request.body.uploadId) } dataSourceIdFox.flatMap { dataSourceId => - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- dsRemoteWebknossosClient.deleteDataSource(dataSourceId) ?~> "dataset.delete.webknossos.failed" _ <- uploadService.cancelUpload(request.body) ?~> "Could not cancel the upload." 
@@ -243,28 +232,24 @@ class DataSourceController @Inject()( } def listMappings( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { addNoCacheHeaderFallback( Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetDirectoryName, dataLayerName)))) } } def listAgglomerates( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetDirectoryName, dataLayerName) @@ -273,16 +258,14 @@ class DataSourceController @Inject()( } def generateAgglomerateSkeleton( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox skeleton <- agglomerateService.generateSkeleton(organizationId, @@ -295,16 +278,14 @@ class DataSourceController @Inject()( } def agglomerateGraph( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateGraph <- agglomerateService.generateAgglomerateGraph( @@ -315,16 +296,14 @@ class DataSourceController @Inject()( } def positionForSegmentViaAgglomerateFile( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { 
agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox position <- agglomerateService.positionForSegmentId( @@ -335,15 +314,13 @@ class DataSourceController @Inject()( } def largestAgglomerateId( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox largestAgglomerateId: Long <- agglomerateService @@ -361,15 +338,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForSegmentIds( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Seq[Long] <- agglomerateService @@ -388,15 +363,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForAllSegmentIds( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Array[Long] <- agglomerateService @@ -413,28 +386,23 @@ class DataSourceController @Inject()( } } - def update(token: Option[String], organizationId: String, datasetDirectoryName: String): Action[DataSource] = + def update(organizationId: String, datasetDirectoryName: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.writeDataSource(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.writeDataSource(DataSourceId(datasetDirectoryName, organizationId))) { for { _ <- Fox.successful(()) - dataSource <- dataSourceRepository - .find(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND + dataSource <- dataSourceRepository.get(DataSourceId(datasetDirectoryName, organizationId)).toFox ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true) } yield Ok } } // Stores a remote dataset in the database. 
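// Only the datasource properties are written; the new dataset is reported to webknossos with size 0 and viaAddRoute = true.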
- def add(token: Option[String], - organizationId: String, - datasetName: String, - folderId: Option[String]): Action[DataSource] = + def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { reservedAdditionalInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -446,40 +414,34 @@ class DataSourceController @Inject()( layersToLink = None, initialTeams = List.empty, folderId = folderId, - ), - urlOrHeaderToken(token, request) + ) ) ?~> "dataset.upload.validation.failed" datasourceId = DataSourceId(reservedAdditionalInfo.directoryName, organizationId) _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), expectExisting = false) - uploadedDatasetId <- dsRemoteWebknossosClient.reportUpload( - datasourceId, - 0L, - needsConversion = false, - viaAddRoute = true, - userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" + uploadedDatasetId <- dsRemoteWebknossosClient.reportUpload(datasourceId, + 0L, + needsConversion = false, + viaAddRoute = true) ?~> "reportUpload.failed" } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetId)) } } - def createOrganizationDirectory(token: Option[String], organizationId: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccessForSyncBlock(UserAccessRequest.administrateDataSources(organizationId), token) { - val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") - newOrganizationDirectory.mkdirs() - if (newOrganizationDirectory.isDirectory) - Ok - else - BadRequest - } + def createOrganizationDirectory(organizationId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.administrateDataSources(organizationId)) { + val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") + newOrganizationDirectory.mkdirs() + if (newOrganizationDirectory.isDirectory) + Ok + else + BadRequest + } } - def measureUsedStorage(token: Option[String], - organizationId: String, - datasetDirectoryName: Option[String] = None): Action[AnyContent] = + def measureUsedStorage(organizationId: String, datasetDirectoryName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { for { before <- Fox.successful(System.currentTimeMillis()) usedStorageInBytes: List[DirectoryStorageReport] <- storageUsageService.measureStorage(organizationId, @@ -494,13 +456,11 @@ class DataSourceController @Inject()( } } - def reload(token: Option[String], - organizationId: String, + def reload(organizationId: String, datasetDirectoryName: String, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), - urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetDirectoryName, layerName) val closedMeshFileHandleCount = meshFileService.clearCache(organizationId, datasetDirectoryName, layerName) @@ -521,11 +481,10 @@ class DataSourceController @Inject()( } } - def deleteOnDisk(token: Option[String], organizationId: String, datasetDirectoryName: String): Action[AnyContent] = + def deleteOnDisk(organizationId: String, datasetDirectoryName: String): Action[AnyContent] = Action.async { implicit request => val dataSourceId = DataSourceId(datasetDirectoryName, organizationId) - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- binaryDataServiceHolder.binaryDataService.deleteOnDisk( organizationId, @@ -536,27 +495,25 @@ class DataSourceController @Inject()( } } - def compose(token: Option[String]): Action[ComposeRequest] = + def compose(): Action[ComposeRequest] = Action.async(validateJson[ComposeRequest]) { implicit request => - val userToken = urlOrHeaderToken(token, request) - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId), token) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organizationId)) { for { _ <- Fox.serialCombined(request.body.layers.map(_.dataSourceId).toList)(id => - accessTokenService.assertUserAccess(UserAccessRequest.readDataSources(id), userToken)) - (dataSource, newDatasetId) <- composeService.composeDataset(request.body, userToken) + accessTokenService.assertUserAccess(UserAccessRequest.readDataSources(id))) + (dataSource, newDatasetId) <- composeService.composeDataset(request.body) _ <- dataSourceRepository.updateDataSource(dataSource) } yield Ok(Json.obj("newDatasetId" -> newDatasetId)) } } - def listConnectomeFiles(token: Option[String], - organizationId: String, + def listConnectomeFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { val connectomeFileNames = connectomeFileService.exploreConnectomeFiles(organizationId, datasetDirectoryName, dataLayerName) for { @@ -575,14 +532,12 @@ class DataSourceController @Inject()( } } - def getSynapsesForAgglomerates(token: Option[String], - organizationId: String, + def getSynapsesForAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { meshFilePath <- Fox.successful( 
connectomeFileService @@ -592,15 +547,13 @@ class DataSourceController @Inject()( } } - def getSynapticPartnerForSynapses(token: Option[String], - organizationId: String, + def getSynapticPartnerForSynapses(organizationId: String, datasetDirectoryName: String, dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -612,14 +565,12 @@ class DataSourceController @Inject()( } } - def getSynapsePositions(token: Option[String], - organizationId: String, + def getSynapsePositions(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -629,14 +580,12 @@ class DataSourceController @Inject()( } } - def getSynapseTypes(token: Option[String], - organizationId: String, + def getSynapseTypes(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -646,14 +595,12 @@ class DataSourceController @Inject()( } } - def checkSegmentIndexFile(token: Option[String], - organizationId: String, + def checkSegmentIndexFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { val segmentIndexFileOpt = segmentIndexFileService.getSegmentIndexFile(organizationId, datasetDirectoryName, dataLayerName).toOption Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) @@ -664,15 +611,13 @@ class DataSourceController @Inject()( * Query the segment index file for a single segment * @return List of bucketPositions as positions (not indices) of 32³ buckets in mag */ - def getSegmentIndex(token: Option[String], - organizationId: String, + def getSegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess( - 
UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, @@ -682,8 +627,7 @@ class DataSourceController @Inject()( request.body.editableMappingTracingId, segmentId.toLong, mappingNameForMeshFile = None, - omitMissing = false, - urlOrHeaderToken(token, request) + omitMissing = false ) fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetDirectoryName, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -705,14 +649,12 @@ class DataSourceController @Inject()( * Query the segment index file for multiple segments * @return List of bucketPositions as indices of 32³ buckets */ - def querySegmentIndex(token: Option[String], - organizationId: String, + def querySegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -724,8 +666,7 @@ class DataSourceController @Inject()( request.body.editableMappingTracingId, segmentOrAgglomerateId, mappingNameForMeshFile = None, - omitMissing = true, // assume agglomerate ids not present in the mapping belong to user-brushed segments - urlOrHeaderToken(token, request) + omitMissing = true // assume agglomerate ids not present in the mapping belong to user-brushed segments ) fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetDirectoryName, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -740,14 +681,12 @@ class DataSourceController @Inject()( } } - def getSegmentVolume(token: Option[String], - organizationId: String, + def getSegmentVolume(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetDirectoryName, dataLayerName) volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -764,14 +703,12 @@ class DataSourceController @Inject()( } } - def getSegmentBoundingBox(token: Option[String], - organizationId: String, + def getSegmentBoundingBox(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, 
organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetDirectoryName, dataLayerName) boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -787,9 +724,10 @@ class DataSourceController @Inject()( } // Called directly by wk side - def exploreRemoteDataset(token: Option[String]): Action[ExploreRemoteDatasetRequest] = + def exploreRemoteDataset(): Action[ExploreRemoteDatasetRequest] = Action.async(validateJson[ExploreRemoteDatasetRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId), token) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organizationId)) { val reportMutable = ListBuffer[String]() val hasLocalFilesystemRequest = request.body.layerParameters.exists(param => new URI(param.remoteUri).getScheme == DataVaultService.schemeFile) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index f4777fdb4b9..e801528a4b3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -35,8 +35,8 @@ class ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, override def allowRemoteOrigin: Boolean = true - def download(token: Option[String], jobId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.downloadJobExport(jobId), urlOrHeaderToken(token, request)) { + def download(jobId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.downloadJobExport(jobId)) { for { exportProperties <- webknossosClient.getJobExportProperties(jobId) fullPath = exportProperties.fullPathIn(dataBaseDir) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index aaeefc53e63..06978aabbe3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator @@ -50,14 +51,12 @@ class ZarrStreamingController @Inject()( * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) */ def requestZAttrs( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - 
urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -69,14 +68,12 @@ class ZarrStreamingController @Inject()( } def requestZarrJson( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetDirectoryName, @@ -91,17 +88,14 @@ class ZarrStreamingController @Inject()( } } - def zAttrsWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String = ""): Action[AnyContent] = + def zAttrsWithAnnotationPrivateLink(accessToken: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => { + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => { remoteTracingstoreClient - .getOmeNgffHeader(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getOmeNgffHeader(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(ngffMetadata => Ok(Json.toJson(ngffMetadata))) }, orElse = annotationSource => @@ -117,17 +111,15 @@ class ZarrStreamingController @Inject()( ) } - def zarrJsonWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String = ""): Action[AnyContent] = + def zarrJsonWithAnnotationPrivateLink(accessToken: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => { + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => { remoteTracingstoreClient - .getZarrJsonGroupHeaderWithNgff(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getZarrJsonGroupHeaderWithNgff(annotationLayer.tracingId, annotationSource.tracingStoreUrl)( + relevantTokenContext) .map(header => Ok(Json.toJson(header))) }, orElse = annotationSource => @@ -150,14 +142,12 @@ class ZarrStreamingController @Inject()( * Note that the result here is not necessarily equal to the file used in the underlying storage. 
*/ def requestDataSource( - token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { dataSource <- dataSourceRepository .findUsable(DataSourceId(datasetDirectoryName, organizationId)) @@ -206,14 +196,12 @@ class ZarrStreamingController @Inject()( } } - def dataSourceWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - zarrVersion: Int): Action[AnyContent] = + def dataSourceWithAnnotationPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND - relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) - else urlOrHeaderToken(token, request) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND + relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) + else tokenContextForRequest volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) dataSource <- dataSourceRepository .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) @@ -226,45 +214,37 @@ class ZarrStreamingController @Inject()( remoteTracingstoreClient.getVolumeLayerAsZarrLayer(l.tracingId, Some(l.name), annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion)) + zarrVersion)(relevantTokenContext)) allLayer = dataSourceLayers ++ annotationLayers zarrSource = GenericDataSource[DataLayer](dataSource.id, allLayer, dataSource.scale) } yield Ok(Json.toJson(zarrSource)) } def requestRawZarrCube( - token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { rawZarrCube(organizationId, datasetDirectoryName, dataLayerName, mag, coordinates) } } - def rawZarrCubePrivateLink(token: Option[String], - accessToken: String, + def rawZarrCubePrivateLink(accessToken: String, dataLayerName: String, mag: String, coordinates: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getRawZarrCube(annotationLayer.tracingId, - mag, - coordinates, - annotationSource.tracingStoreUrl, - relevantToken) + .getRawZarrCube(annotationLayer.tracingId, mag, coordinates, annotationSource.tracingStoreUrl)( + relevantTokenContext) .map(Ok(_)), orElse = annotationSource => rawZarrCube(annotationSource.organizationId, @@ -311,15 +291,14 @@ class ZarrStreamingController @Inject()( _ 
<- bool2Fox(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" ~> NOT_FOUND } yield Ok(data) - def requestZArray(token: Option[String], - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - mag: String, + def requestZArray( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { zArray(organizationId, datasetDirectoryName, dataLayerName, mag) } } @@ -336,15 +315,14 @@ class ZarrStreamingController @Inject()( zarrHeader = ZarrHeader.fromLayer(dataLayer, magParsed) } yield Ok(Json.toJson(zarrHeader)) - def requestZarrJsonForMag(token: Option[String], - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - mag: String, + def requestZarrJsonForMag( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { zarrJsonForMag(organizationId, datasetDirectoryName, dataLayerName, mag) } } @@ -361,67 +339,58 @@ class ZarrStreamingController @Inject()( zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer, magParsed) } yield Ok(Json.toJson(zarrHeader)) - def zArrayPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String, - mag: String): Action[AnyContent] = Action.async { implicit request => - ifIsAnnotationLayerOrElse( - token, - accessToken, - dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => - remoteTracingstoreClient - .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) - .map(z => Ok(Json.toJson(z))), - orElse = annotationSource => - zArray(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) - ) + def zArrayPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { + implicit request => + ifIsAnnotationLayerOrElse( + accessToken, + dataLayerName, + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => + remoteTracingstoreClient + .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) + .map(z => Ok(Json.toJson(z))), + orElse = annotationSource => + zArray(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) + ) } - def zarrJsonPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String, - mag: String): Action[AnyContent] = Action.async { implicit request => - ifIsAnnotationLayerOrElse( - token, - accessToken, - dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => - remoteTracingstoreClient - .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) - .map(z => Ok(Json.toJson(z))), - orElse = annotationSource => - 
zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) - ) + def zarrJsonPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { + implicit request => + ifIsAnnotationLayerOrElse( + accessToken, + dataLayerName, + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => + remoteTracingstoreClient + .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) + .map(z => Ok(Json.toJson(z))), + orElse = annotationSource => + zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) + ) } private def ifIsAnnotationLayerOrElse( - token: Option[String], accessToken: String, dataLayerName: String, - ifIsAnnotationLayer: (AnnotationLayer, AnnotationSource, Option[String]) => Fox[Result], + ifIsAnnotationLayer: (AnnotationLayer, AnnotationSource, TokenContext) => Fox[Result], orElse: AnnotationSource => Fox[Result])(implicit request: Request[Any]): Fox[Result] = for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND - relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) - else urlOrHeaderToken(token, request) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND + relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) + else tokenContextForRequest layer = annotationSource.getAnnotationLayer(dataLayerName) result <- layer match { - case Some(annotationLayer) => ifIsAnnotationLayer(annotationLayer, annotationSource, relevantToken) + case Some(annotationLayer) => ifIsAnnotationLayer(annotationLayer, annotationSource, relevantTokenContext) case None => orElse(annotationSource) } } yield result - def requestDataLayerMagFolderContents(token: Option[String], - organizationId: String, + def requestDataLayerMagFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { dataLayerMagFolderContents(organizationId, datasetDirectoryName, dataLayerName, mag, zarrVersion) } } @@ -447,23 +416,20 @@ class ZarrStreamingController @Inject()( additionalEntries )).withHeaders() - def dataLayerMagFolderContentsPrivateLink(token: Option[String], - accessToken: String, + def dataLayerMagFolderContentsPrivateLink(accessToken: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient .getDataLayerMagFolderContents(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion) + zarrVersion)(relevantTokenContext) .map( layers => Ok( @@ -481,14 +447,12 @@ class ZarrStreamingController @Inject()( ) } - def requestDataLayerFolderContents(token: Option[String], - organizationId: 
String, + def requestDataLayerFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { dataLayerFolderContents(organizationId, datasetDirectoryName, dataLayerName, zarrVersion) } } @@ -514,21 +478,17 @@ class ZarrStreamingController @Inject()( additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true)) )).withHeaders() - def dataLayerFolderContentsPrivateLink(token: Option[String], - accessToken: String, + def dataLayerFolderContentsPrivateLink(accessToken: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerFolderContents(annotationLayer.tracingId, - annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion) + .getDataLayerFolderContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( + relevantTokenContext) .map( layers => Ok( @@ -545,14 +505,12 @@ class ZarrStreamingController @Inject()( ) } - def requestDataSourceFolderContents(token: Option[String], - organizationId: String, + def requestDataSourceFolderContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { dataSource <- dataSourceRepository .findUsable(DataSourceId(datasetDirectoryName, organizationId)) @@ -570,12 +528,10 @@ class ZarrStreamingController @Inject()( } } - def dataSourceFolderContentsPrivateLink(token: Option[String], - accessToken: String, - zarrVersion: Int): Action[AnyContent] = + def dataSourceFolderContentsPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) dataSource <- dataSourceRepository .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND @@ -597,28 +553,26 @@ class ZarrStreamingController @Inject()( )) } - def requestZGroup(token: Option[String], - organizationId: String, + def requestZGroup(organizationId: String, datasetDirectoryName: String, - dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId)), - urlOrHeaderToken(token, request)) { - Ok(zGroupJson) + dataLayerName: String = ""): Action[AnyContent] = + Action.async { implicit request => + 
accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + Ok(zGroupJson) + } } - } private def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) - def zGroupPrivateLink(token: Option[String], accessToken: String, dataLayerName: String): Action[AnyContent] = + def zGroupPrivateLink(accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(Ok(_)), orElse = _ => Fox.successful(Ok(zGroupJson)) ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala index 98e10cd876d..750b3733283 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.models.BucketPosition trait WKWDataFormatHelper { - val dataFileExtension: String = "wkw" + private val dataFileExtension: String = "wkw" val FILENAME_HEADER_WKW: String = s"header.$dataFileExtension" // Assumes single-bucket wkw files, as for volume tracings diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index a6d0c65c8c5..b8b5286f55a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.models.annotation import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationLayerProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType @@ -15,30 +16,17 @@ case class AnnotationLayer( typ: AnnotationLayerType, name: String, stats: JsObject, -) - -object AnnotationLayerStatistics { - - def zeroedForTyp(typ: AnnotationLayerType): JsObject = typ match { - case AnnotationLayerType.Skeleton => - Json.obj( - "treeCount" -> 0, - "nodeCount" -> 0, - "edgeCount" -> 0, - "branchPointCount" -> 0 - ) - case AnnotationLayerType.Volume => - Json.obj( - "segmentCount" -> 0 - ) - } - - def unknown: JsObject = Json.obj() +) { + def toProto: AnnotationLayerProto = + AnnotationLayerProto(tracingId, name, AnnotationLayerType.toProto(typ)) } object AnnotationLayer extends FoxImplicits { implicit val jsonFormat: OFormat[AnnotationLayer] = Json.format[AnnotationLayer] + def fromProto(p: 
AnnotationLayerProto): AnnotationLayer = + AnnotationLayer(p.tracingId, AnnotationLayerType.fromProto(p.typ), p.name, AnnotationLayerStatistics.unknown) + val defaultSkeletonLayerName: String = "Skeleton" val defaultVolumeLayerName: String = "Volume" @@ -63,6 +51,25 @@ object AnnotationLayer extends FoxImplicits { } } +object AnnotationLayerStatistics { + + def zeroedForType(typ: AnnotationLayerType): JsObject = typ match { + case AnnotationLayerType.Skeleton => + Json.obj( + "treeCount" -> 0, + "nodeCount" -> 0, + "edgeCount" -> 0, + "branchPointCount" -> 0 + ) + case AnnotationLayerType.Volume => + Json.obj( + "segmentCount" -> 0 + ) + } + + def unknown: JsObject = Json.obj() +} + case class FetchedAnnotationLayer(tracingId: String, name: String, tracing: Either[SkeletonTracing, VolumeTracing], diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala index 0a9576b91aa..2593bedce4f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala @@ -1,8 +1,23 @@ package com.scalableminds.webknossos.datastore.models.annotation import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.webknossos.datastore.Annotation.AnnotationLayerTypeProto object AnnotationLayerType extends ExtendedEnumeration { type AnnotationLayerType = Value val Skeleton, Volume = Value + + def toProto(annotationLayerType: AnnotationLayerType): AnnotationLayerTypeProto = + annotationLayerType match { + case Skeleton => AnnotationLayerTypeProto.Skeleton + case Volume => AnnotationLayerTypeProto.Volume + } + + def fromProto(p: AnnotationLayerTypeProto): AnnotationLayerType = + p match { + case AnnotationLayerTypeProto.Skeleton => Skeleton + case AnnotationLayerTypeProto.Volume => Volume + case AnnotationLayerTypeProto.Unrecognized(_) => + Volume // unrecognized should never happen, artifact of proto code generation + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 7f94e778e96..d5676562d96 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.rpc +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.mvc.MimeTypes import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -10,6 +11,7 @@ import play.api.libs.ws._ import scalapb.{GeneratedMessage, GeneratedMessageCompanion} import java.io.File +import java.nio.charset.StandardCharsets import scala.concurrent.ExecutionContext import scala.concurrent.duration._ @@ -26,6 +28,9 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: this } + def withTokenFromContext(implicit tc: TokenContext): RPCRequest = + addQueryStringOptional("token", tc.userTokenOpt) + def addHttpHeaders(hdrs: (String, String)*): RPCRequest = { request = request.addHttpHeaders(hdrs: _*) this @@ -109,7 +114,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: 
parseJsonResponse(performRequest) } - def postWithJsonResponse[T: Reads]: Fox[T] = { + def postWithJsonResponse[T: Reads](): Fox[T] = { request = request.withMethod("POST") parseJsonResponse(performRequest) } @@ -167,10 +172,16 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } - def postJson[J: Writes](body: J = Json.obj()): Unit = { + def postJson[J: Writes](body: J = Json.obj()): Fox[Unit] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") - performRequest + performRequest.map(_ => ()) + } + + def postProto[T <: GeneratedMessage](body: T): Fox[Unit] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> protobufMimeType).withBody(body.toByteArray).withMethod("POST") + performRequest.map(_ => ()) } def postProtoWithJsonResponse[T <: GeneratedMessage, J: Reads](body: T): Fox[J] = { @@ -186,6 +197,11 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } + def postWithProtoResponse[T <: GeneratedMessage]()(companion: GeneratedMessageCompanion[T]): Fox[T] = { + request = request.withMethod("POST") + parseProtoResponse(performRequest)(companion) + } + private def performRequest: Fox[WSResponse] = { if (verbose) { logger.debug( @@ -199,7 +215,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: Full(result) } else { val errorMsg = s"Unsuccessful WS request to $url (ID: $id)." + - s"Status: ${result.status}. Response: ${result.bodyAsBytes.map(_.toChar).mkString.take(2000)}" + s"Status: ${result.status}. Response: ${new String(result.bodyAsBytes.toArray, StandardCharsets.UTF_8).take(2000)}" logger.error(errorMsg) Failure(errorMsg.take(400)) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 2e89f193607..b44d93c0468 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.tools.Fox @@ -19,7 +20,7 @@ object AccessMode extends ExtendedEnumeration { object AccessResourceType extends ExtendedEnumeration { type AccessResourceType = Value - val datasource, tracing, webknossos, jobExport = Value + val datasource, tracing, annotation, webknossos, jobExport = Value } case class UserAccessAnswer(granted: Boolean, msg: Option[String] = None) @@ -42,9 +43,16 @@ object UserAccessRequest { def readTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) + def writeTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) + def readAnnotation(annotationId: String): UserAccessRequest = + UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.read) + + def writeAnnotation(annotationId: String): UserAccessRequest = + 
UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.write) + def downloadJobExport(jobId: String): UserAccessRequest = UserAccessRequest(DataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) @@ -59,28 +67,27 @@ trait AccessTokenService { private lazy val accessAnswersCache: AlfuCache[(UserAccessRequest, Option[String]), UserAccessAnswer] = AlfuCache(timeToLive = AccessExpiration, timeToIdle = AccessExpiration) - def validateAccessForSyncBlock(accessRequest: UserAccessRequest, token: Option[String])(block: => Result)( - implicit ec: ExecutionContext): Fox[Result] = - validateAccess(accessRequest, token) { + def validateAccessFromTokenContextForSyncBlock(accessRequest: UserAccessRequest)( + block: => Result)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Result] = + validateAccessFromTokenContext(accessRequest) { Future.successful(block) } - def validateAccess(accessRequest: UserAccessRequest, token: Option[String])(block: => Future[Result])( - implicit ec: ExecutionContext): Fox[Result] = + def validateAccessFromTokenContext(accessRequest: UserAccessRequest)( + block: => Future[Result])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Result] = for { - userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." + userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." result <- executeBlockOnPositiveAnswer(userAccessAnswer, block) } yield result - private def hasUserAccess(accessRequest: UserAccessRequest, token: Option[String])( - implicit ec: ExecutionContext): Fox[UserAccessAnswer] = - accessAnswersCache.getOrLoad((accessRequest, token), - _ => remoteWebknossosClient.requestUserAccess(token, accessRequest)) + private def hasUserAccess(accessRequest: UserAccessRequest)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[UserAccessAnswer] = + accessAnswersCache.getOrLoad((accessRequest, tc.userTokenOpt), + _ => remoteWebknossosClient.requestUserAccess(accessRequest)) - def assertUserAccess(accessRequest: UserAccessRequest, token: Option[String])( - implicit ec: ExecutionContext): Fox[Unit] = + def assertUserAccess(accessRequest: UserAccessRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = for { - userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." + userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." 
_ <- Fox.bool2Fox(userAccessAnswer.granted) ?~> userAccessAnswer.msg.getOrElse("Access forbidden.") } yield () diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala index 11c560f31cd..4da043fb9f2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -49,14 +50,15 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def loadFor(token: Option[String], - organizationId: String, + def loadFor(organizationId: String, datasetDirectoryName: String, dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = fullMeshRequest.meshFileName match { case Some(_) => - loadFullMeshFromMeshfile(token, organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) + loadFullMeshFromMeshfile(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) case None => loadFullMeshFromAdHoc(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) } @@ -113,12 +115,12 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, } yield allVertices } - private def loadFullMeshFromMeshfile( - token: Option[String], - organizationId: String, - datasetDirectoryName: String, - layerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + private def loadFullMeshFromMeshfile(organizationId: String, + datasetDirectoryName: String, + layerName: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = for { meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "meshFileName.needed" before = Instant.now @@ -134,8 +136,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, fullMeshRequest.editableMappingTracingId, fullMeshRequest.segmentId, mappingNameForMeshFile, - omitMissing = false, - token + omitMissing = false ) chunkInfos: WebknossosSegmentInfo <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, datasetDirectoryName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala index 2924c0687e4..5bd69d4d7c9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala @@ -1,8 +1,8 @@ package 
com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.layers.ZarrSegmentationLayer import com.scalableminds.webknossos.datastore.datareaders.zarr.{NgffMetadata, ZarrHeader} import com.scalableminds.webknossos.datastore.datareaders.zarr3.{Zarr3ArrayHeader, Zarr3GroupHeader} @@ -21,88 +21,63 @@ object EditableMappingSegmentListResult { class DSRemoteTracingstoreClient @Inject()( rpc: RPC, - config: DataStoreConfig, val lifecycle: ApplicationLifecycle, ) extends LazyLogging with FoxImplicits { + private def getZarrVersionDependantSubPath = (zarrVersion: Int) => if (zarrVersion == 2) "zarr" else "zarr3_experimental" - def getZArray(tracingId: String, mag: String, tracingStoreUri: String, token: Option[String]): Fox[ZarrHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/.zarray") - .addQueryStringOptional("token", token) + def getZArray(tracingId: String, mag: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[ZarrHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/.zarray").withTokenFromContext .getWithJsonResponse[ZarrHeader] - def getZarrJson(tracingId: String, - mag: String, - tracingStoreUri: String, - token: Option[String]): Fox[Zarr3ArrayHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/$mag/zarr.json") - .addQueryStringOptional("token", token) + def getZarrJson(tracingId: String, mag: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Zarr3ArrayHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/$mag/zarr.json").withTokenFromContext .getWithJsonResponse[Zarr3ArrayHeader] def getVolumeLayerAsZarrLayer(tracingId: String, tracingName: Option[String], tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[ZarrSegmentationLayer] = { + zarrVersion: Int)(implicit tc: TokenContext): Fox[ZarrSegmentationLayer] = { val zarrVersionDependantSubPath = getZarrVersionDependantSubPath(zarrVersion) - rpc(s"$tracingStoreUri/tracings/volume/$zarrVersionDependantSubPath/$tracingId/zarrSource") - .addQueryStringOptional("token", token) + rpc(s"$tracingStoreUri/tracings/volume/$zarrVersionDependantSubPath/$tracingId/zarrSource").withTokenFromContext .addQueryStringOptional("tracingName", tracingName) .getWithJsonResponse[ZarrSegmentationLayer] } - def getOmeNgffHeader(tracingId: String, tracingStoreUri: String, token: Option[String]): Fox[NgffMetadata] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zattrs") - .addQueryStringOptional("token", token) + def getOmeNgffHeader(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[NgffMetadata] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zattrs").withTokenFromContext .getWithJsonResponse[NgffMetadata] - def getZarrJsonGroupHeaderWithNgff(tracingId: String, - tracingStoreUri: String, - token: Option[String]): Fox[Zarr3GroupHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/zarr.json") - .addQueryStringOptional("token", token) + def getZarrJsonGroupHeaderWithNgff(tracingId: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Zarr3GroupHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/zarr.json").withTokenFromContext 
.getWithJsonResponse[Zarr3GroupHeader] - def getRawZarrCube(tracingId: String, - mag: String, - cxyz: String, - tracingStoreUri: String, - token: Option[String]): Fox[Array[Byte]] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/$cxyz").silent - .addQueryStringOptional("token", token) - .getWithBytesResponse + def getRawZarrCube(tracingId: String, mag: String, cxyz: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Array[Byte]] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/$cxyz").silent.withTokenFromContext.getWithBytesResponse - def getDataLayerMagFolderContents(tracingId: String, - mag: String, - tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[List[String]] = - rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId/$mag") - .addQueryStringOptional("token", token) + def getDataLayerMagFolderContents(tracingId: String, mag: String, tracingStoreUri: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId/$mag").withTokenFromContext .getWithJsonResponse[List[String]] - def getDataLayerFolderContents(tracingId: String, - tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[List[String]] = - rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId") - .addQueryStringOptional("token", token) + def getDataLayerFolderContents(tracingId: String, tracingStoreUri: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId").withTokenFromContext .getWithJsonResponse[List[String]] - def getZGroup(tracingId: String, tracingStoreUri: String, token: Option[String]): Fox[JsObject] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup") - .addQueryStringOptional("token", token) - .getWithJsonResponse[JsObject] + def getZGroup(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[JsObject] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup").withTokenFromContext.getWithJsonResponse[JsObject] - def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, - tracingId: String, - agglomerateId: Long, - token: Option[String]): Fox[EditableMappingSegmentListResult] = + def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, tracingId: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[EditableMappingSegmentListResult] = rpc(s"$tracingStoreUri/tracings/mapping/$tracingId/segmentsForAgglomerate") .addQueryString("agglomerateId" -> agglomerateId.toString) - .addQueryStringOptional("token", token) + .withTokenFromContext .silent .getWithJsonResponse[EditableMappingSegmentListResult] } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index fa669171100..01675ec3f93 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -3,6 +3,7 @@ package com.scalableminds.webknossos.datastore.services import org.apache.pekko.actor.ActorSystem import 
com.google.inject.Inject import com.google.inject.name.Named +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -36,7 +37,7 @@ object TracingStoreInfo { } trait RemoteWebknossosClient { - def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] + def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] } class DSRemoteWebknossosClient @Inject()( @@ -71,21 +72,17 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("key" -> dataStoreKey) .put(dataSource) - def getUnfinishedUploadsForUser(userTokenOpt: Option[String], organizationName: String): Fox[List[UnfinishedUpload]] = + def getUnfinishedUploadsForUser(organizationName: String)(implicit tc: TokenContext): Fox[List[UnfinishedUpload]] = for { - userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" unfinishedUploads <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/getUnfinishedUploadsForUser") .addQueryString("key" -> dataStoreKey) - .addQueryString("token" -> userToken) .addQueryString("organizationName" -> organizationName) + .withTokenFromContext .getWithJsonResponse[List[UnfinishedUpload]] } yield unfinishedUploads - def reportUpload(dataSourceId: DataSourceId, - datasetSizeBytes: Long, - needsConversion: Boolean, - viaAddRoute: Boolean, - userToken: Option[String]): Fox[String] = + def reportUpload(dataSourceId: DataSourceId, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean)( + implicit tc: TokenContext): Fox[String] = for { uploadedDatasetIdJson <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> dataStoreKey) @@ -93,8 +90,8 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("needsConversion" -> needsConversion.toString) .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) - .addQueryStringOptional("token", userToken) - .postWithJsonResponse[JsValue] + .withTokenFromContext + .postWithJsonResponse[JsValue]() uploadedDatasetId <- (uploadedDatasetIdJson \ "id").validate[String].asOpt.toFox ?~> "uploadedDatasetId.invalid" } yield uploadedDatasetId @@ -104,13 +101,12 @@ class DSRemoteWebknossosClient @Inject()( .silent .put(dataSources) - def reserveDataSourceUpload(info: ReserveUploadInformation, - userTokenOpt: Option[String]): Fox[ReserveAdditionalInformation] = + def reserveDataSourceUpload(info: ReserveUploadInformation)( + implicit tc: TokenContext): Fox[ReserveAdditionalInformation] = for { - userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" reserveUploadInfo <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") .addQueryString("key" -> dataStoreKey) - .addQueryString("token" -> userToken) + .withTokenFromContext .postWithJsonResponse[ReserveUploadInformation, ReserveAdditionalInformation](info) } yield reserveUploadInfo @@ -123,10 +119,10 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("key" -> dataStoreKey) .getWithJsonResponse[JobExportProperties] - override def requestUserAccess(userToken: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = 
rpc(s"$webknossosUri/api/datastores/$dataStoreName/validateUserAccess") .addQueryString("key" -> dataStoreKey) - .addQueryStringOptional("token", userToken) + .withTokenFromContext .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) private lazy val tracingstoreUriCache: AlfuCache[String, String] = AlfuCache() @@ -146,13 +142,13 @@ class DSRemoteWebknossosClient @Inject()( private lazy val annotationSourceCache: AlfuCache[(String, Option[String]), AnnotationSource] = AlfuCache(timeToLive = 5 seconds, timeToIdle = 5 seconds) - def getAnnotationSource(accessToken: String, userToken: Option[String]): Fox[AnnotationSource] = + def getAnnotationSource(accessToken: String)(implicit tc: TokenContext): Fox[AnnotationSource] = annotationSourceCache.getOrLoad( - (accessToken, userToken), + (accessToken, tc.userTokenOpt), _ => rpc(s"$webknossosUri/api/annotations/source/$accessToken") .addQueryString("key" -> dataStoreKey) - .addQueryStringOptional("userToken", userToken) + .withTokenFromContext .getWithJsonResponse[AnnotationSource] ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala index bedad238561..f2f4d5f2998 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala @@ -1,13 +1,13 @@ package com.scalableminds.webknossos.datastore.services -import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named +import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} import com.scalableminds.webknossos.datastore.storage.TemporaryStore -import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging +import org.apache.pekko.actor.ActorSystem import play.api.i18n.{Messages, MessagesProvider} import scala.concurrent.ExecutionContext @@ -29,7 +29,7 @@ class DataSourceRepository @Inject()( } yield (dataSource, dataLayer) def findUsable(id: DataSourceId): Option[DataSource] = - find(id).flatMap(_.toUsable) + get(id).flatMap(_.toUsable) def updateDataSource(dataSource: InboxDataSource): Fox[Unit] = for { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala index b99dcf804e7..50e9c7bf3da 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala @@ -61,18 +61,19 @@ class DatasetErrorLoggingService @Inject()( Fox.successful(data) } case Failure(msg, Full(e: InternalError), _) => - logger.error(s"Caught internal error while $label for $dataSourceId:", e) + logger.error(s"Caught internal error ($msg) while $label for $dataSourceId:", e) applicationHealthService.pushError(e) Fox.failure(msg, Full(e)) case Failure(msg, Full(exception), _) => if (shouldLog(dataSourceId.organizationId, dataSourceId.directoryName)) { - logger.error(s"Error while $label for 
$dataSourceId Stack trace: ${TextUtils.stackTraceAsString(exception)} ") + logger.error( + s"Error while $label for $dataSourceId: $msg – Stack trace: ${TextUtils.stackTraceAsString(exception)} ") registerLogged(dataSourceId.organizationId, dataSourceId.directoryName) } Fox.failure(msg, Full(exception)) case Failure(msg, Empty, _) => if (shouldLog(dataSourceId.organizationId, dataSourceId.directoryName)) { - logger.error(s"Error while $label for $dataSourceId, Empty failure") + logger.error(s"Error while $label for $dataSourceId, Failure without exception: $msg") registerLogged(dataSourceId.organizationId, dataSourceId.directoryName) } Fox.failure(msg) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala index a5bff4da288..e5831d8a580 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.services +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.storage.AgglomerateFileKey @@ -21,8 +22,8 @@ trait MeshMappingHelper { editableMappingTracingId: Option[String], agglomerateId: Long, mappingNameForMeshFile: Option[String], - omitMissing: Boolean, // If true, failing lookups in the agglomerate file will just return empty list. - token: Option[String])(implicit ec: ExecutionContext): Fox[List[Long]] = + omitMissing: Boolean // If true, failing lookups in the agglomerate file will just return empty list. + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Long]] = (targetMappingName, editableMappingTracingId) match { case (None, None) => // No mapping selected, assume id matches meshfile @@ -58,8 +59,7 @@ trait MeshMappingHelper { tracingstoreUri <- dsRemoteWebknossosClient.getTracingstoreUri segmentIdsResult <- dsRemoteTracingstoreClient.getEditableMappingSegmentIdsForAgglomerate(tracingstoreUri, tracingId, - agglomerateId, - token) + agglomerateId) segmentIds <- if (segmentIdsResult.agglomerateIdIsPresent) Fox.successful(segmentIdsResult.segmentIds) else // the agglomerate id is not present in the editable mapping. Fetch its info from the base mapping. 
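Editorial note: the recurring change across the hunks above is that the explicit `token: Option[String]` parameter is dropped everywhere in favor of an implicit `TokenContext` that is derived once per request and then threaded implicitly through access checks and outgoing RPCs. Below is a minimal sketch (not part of this diff) of how the pieces fit together. `TokenContext`, `UserAccessRequest`, `validateAccessFromTokenContext`, `withTokenFromContext`, and `getZGroup` are names taken from this PR; the controller, its wiring, and how the implicit `TokenContext` is brought into scope are assumptions for illustration.

// Minimal editorial sketch, assuming the controller base trait provides an
// implicit TokenContext derived from the request's token query parameter or
// header (cf. tokenContextForRequest in ZarrStreamingController above).
class ExampleController @Inject()(
    accessTokenService: AccessTokenService, // hypothetical wiring
    remoteTracingstoreClient: DSRemoteTracingstoreClient)
    extends Controller {

  // Hypothetical action; parameter names mirror the real routes file.
  def example(organizationId: String, datasetDirectoryName: String): Action[AnyContent] =
    Action.async { implicit request =>
      // The access check no longer takes a token argument; it reads the
      // implicit TokenContext instead.
      accessTokenService.validateAccessFromTokenContext(
        UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
        for {
          // The same implicit TokenContext flows into the RPC, which appends
          // the token via withTokenFromContext instead of an explicit
          // addQueryStringOptional("token", token) call.
          zGroup <- remoteTracingstoreClient.getZGroup("someTracingId", "https://tracingstore.example")
        } yield Ok(zGroup)
      }
    }
}

One consequence of this design, visible throughout the diff, is that helper methods like `segmentIdsForAgglomerateIdIfNeeded` simply declare `(implicit tc: TokenContext)` and stop forwarding token parameters by hand.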
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 8f3aa0b1897..9f6926b9cb8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -1,19 +1,9 @@ package com.scalableminds.webknossos.datastore.services.uploading +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.io.PathUtils import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.dataformats.layers.{ - N5DataLayer, - N5SegmentationLayer, - PrecomputedDataLayer, - PrecomputedSegmentationLayer, - WKWDataLayer, - WKWSegmentationLayer, - Zarr3DataLayer, - Zarr3SegmentationLayer, - ZarrDataLayer, - ZarrSegmentationLayer -} +import com.scalableminds.webknossos.datastore.dataformats.layers._ import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.services.{ @@ -61,7 +51,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def uploadDirectory(organizationId: String, datasetDirectoryName: String): Path = dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName) - def composeDataset(composeRequest: ComposeRequest, userToken: Option[String]): Fox[(DataSource, String)] = + def composeDataset(composeRequest: ComposeRequest)(implicit tc: TokenContext): Fox[(DataSource, String)] = for { _ <- dataSourceService.assertDataDirWritable(composeRequest.organizationId) reserveUploadInfo = ReserveUploadInformation( @@ -74,7 +64,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, List(), Some(composeRequest.targetFolderId) ) - reservedAdditionalInfo <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." + reservedAdditionalInfo <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo) ?~> "Failed to reserve upload." 
directory = uploadDirectory(composeRequest.organizationId, reservedAdditionalInfo.directoryName) _ = PathUtils.ensureDirectory(directory) dataSource <- createDatasource(composeRequest, @@ -87,7 +77,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] = for { - dataSource <- Fox.option2Fox(dataSourceRepository.find(composeLayer.dataSourceId)) + dataSource <- Fox.option2Fox(dataSourceRepository.get(composeLayer.dataSourceId)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala index 99491288037..45f2d4e0766 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala @@ -45,6 +45,7 @@ class SlackClient(rpc: RPC, slackUri: String, name: String, verboseLoggingEnable rpc(slackUri).postJson( Json.obj("attachments" -> Json.arr(jsonMessage)) ) + () } else { logger.warn( s"Not sending slack notification as rate limit of $messagesSentSinceReset was reached. Message was: $jsonMessage") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala index fde054f6772..7b064b078a6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala @@ -44,6 +44,9 @@ trait RedisTemporaryStore extends LazyLogging { r.keys(pattern).map(_.flatten).getOrElse(List()) } + def insertKey(id: String, expirationOpt: Option[FiniteDuration] = None): Fox[Unit] = + insert(id, "", expirationOpt) + def insert(id: String, value: String, expirationOpt: Option[FiniteDuration] = None): Fox[Unit] = withExceptionHandler { expirationOpt diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala index 1ec23abd39a..a290b5bc644 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala @@ -10,7 +10,7 @@ class TemporaryStore[K, V] @Inject()(system: ActorSystem) { lazy val map: scala.collection.mutable.Map[K, V] = scala.collection.mutable.Map() - def find(id: K): Option[V] = + def get(id: K): Option[V] = map.synchronized { map.get(id) } @@ -20,12 +20,12 @@ class TemporaryStore[K, V] @Inject()(system: ActorSystem) { map.contains(id) ) - def findAll: Seq[V] = + def getAll: Seq[V] = map.synchronized { map.values.toList } - def findAllConditionalWithKey(predicate: K => Boolean): scala.collection.Map[K, V] = + def getAllConditionalWithKey(predicate: K => Boolean): scala.collection.Map[K, V] = map.synchronized { map.view.filterKeys(predicate).toMap } diff --git 
a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 170ed69c5b0..f5303e2e1d2 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -5,122 +5,122 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(organizationId: String, datasetDirectoryName: String, 
dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Knossos compatible routes -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) # Zarr2 compatible routes -GET /zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName="") -GET /zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetDirectoryName: String, 
dataLayerName: String) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) - -GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName="") -GET /annotations/zarr/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zAttrsWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/ 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zArrayPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName="") +GET /zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) +GET 
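# Editor's note (not part of the original routes file): the recurring pattern in this PR is that the
# explicit token: Option[String] query parameter is dropped from every datastore route. Authentication
# presumably now flows through a TokenContext that the controllers derive from the request itself,
# so clients can keep sending ?token=..., but the routes no longer declare it as a parameter.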
/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(accessTokenOrId: String, dataLayerName="") +GET /annotations/zarr/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zAttrsWithAnnotationPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zArrayPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Zarr3 compatible routes -GET /zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) 
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) - -GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET 
/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) +GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Segmentation mappings -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName 
@com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Agglomerate files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, segmentId: Long) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(organizationId: String, 
datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(organizationId: String, datasetDirectoryName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Connectome files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, direction: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(organizationId: String, datasetDirectoryName: String, dataLayerName: String, direction: String) +POST 
/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Ad-Hoc Meshing -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Segment-Index files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String, segmentId: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(token: Option[String], organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String, segmentId: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/boundingBox 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # DataSource management -GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String) -POST /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk(token: Option[String]) -GET /datasets/getUnfinishedUploads @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getUnfinishedUploads(token: Option[String], organizationName: String) -POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload(token: Option[String]) -POST /datasets/reserveManualUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload(token: Option[String]) -POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload(token: Option[String]) -POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload(token: Option[String]) -GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetDirectoryName: Option[String]) -GET /datasets/:organizationId/:datasetDirectoryName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetDirectoryName: String) -PUT /datasets/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetDirectoryName: String) -POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) -DELETE /datasets/:organizationId/:datasetDirectoryName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetDirectoryName: String) -POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) -POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: Option[String]) +GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(resumableChunkNumber: Int, resumableIdentifier: String) +POST /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk() +GET /datasets/getUnfinishedUploads @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getUnfinishedUploads(organizationName: String) +POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload() +POST /datasets/reserveManualUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload() +POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload() +POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload() +GET 
/datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(organizationId: String, datasetDirectoryName: Option[String]) +GET /datasets/:organizationId/:datasetDirectoryName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(organizationId: String, datasetDirectoryName: String) +PUT /datasets/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(organizationId: String, datasetDirectoryName: String) +POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(organizationId: String, datasetName: String, folderId: Option[String]) +DELETE /datasets/:organizationId/:datasetDirectoryName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(organizationId: String, datasetDirectoryName: String) +POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose() +POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset() # Actions -POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String]) -POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(token: Option[String], organizationId: String) -POST /triggers/reload/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationId: String, datasetDirectoryName: String, layerName: Option[String]) +POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking() +POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(organizationId: String) +POST /triggers/reload/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetDirectoryName: String, layerName: Option[String]) # Exports -GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(token: Option[String], jobId: String) +GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(jobId: String) diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto new file mode 100644 index 00000000000..a831beafb49 --- /dev/null +++ b/webknossos-datastore/proto/Annotation.proto @@ -0,0 +1,23 @@ +syntax = "proto2"; + +package com.scalableminds.webknossos.datastore; + +enum AnnotationLayerTypeProto { + Skeleton = 1; + Volume = 2; +} + +message AnnotationProto { + required string description = 1; // empty string encodes no description + required int64 version = 2; + repeated AnnotationLayerProto annotationLayers = 3; + required int64 earliestAccessibleVersion = 4; + optional bool skeletonMayHavePendingUpdates = 5; // relevant only for annotations migrated by https://github.com/scalableminds/webknossos/pull/7917 + optional bool editableMappingsMayHavePendingUpdates = 6; // relevant only for annotations migrated by 
https://github.com/scalableminds/webknossos/pull/7917 +} + +message AnnotationLayerProto { + required string tracingId = 1; + required string name = 2; + required AnnotationLayerTypeProto typ = 3; +} diff --git a/webknossos-datastore/proto/SegmentToAgglomerateProto.proto b/webknossos-datastore/proto/SegmentToAgglomerateProto.proto index 519276323c3..6bb61fdf783 100644 --- a/webknossos-datastore/proto/SegmentToAgglomerateProto.proto +++ b/webknossos-datastore/proto/SegmentToAgglomerateProto.proto @@ -7,6 +7,6 @@ message SegmentAgglomeratePair { required int64 agglomerateId = 2; } -message SegmentToAgglomerateProto { +message SegmentToAgglomerateChunkProto { repeated SegmentAgglomeratePair segmentToAgglomerate = 1; } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index c5cbcccfe07..40a415b20a7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox @@ -38,36 +39,28 @@ class TSRemoteDatastoreClient @Inject()( private lazy val largestAgglomerateIdCache: AlfuCache[(RemoteFallbackLayer, String, Option[String]), Long] = AlfuCache(timeToLive = 10 minutes) - def getAgglomerateSkeleton(userToken: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - mappingName: String, - agglomerateId: Long): Fox[Array[Byte]] = + def getAgglomerateSkeleton(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/skeleton/$agglomerateId") - .addQueryStringOptional("token", userToken) - .getWithBytesResponse + result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/skeleton/$agglomerateId").withTokenFromContext.getWithBytesResponse } yield result - def getData(remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebknossosDataRequest], - userToken: Option[String]): Fox[(Array[Byte], List[Int])] = + def getData(remoteFallbackLayer: RemoteFallbackLayer, dataRequests: List[WebknossosDataRequest])( + implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - response <- rpc(s"$remoteLayerUri/data").addQueryStringOptional("token", userToken).silent.post(dataRequests) + response <- rpc(s"$remoteLayerUri/data").withTokenFromContext.silent.post(dataRequests) _ <- bool2Fox(Status.isSuccessful(response.status)) bytes = response.bodyAsBytes.toArray indices <- parseMissingBucketHeader(response.header(missingBucketsHeader)) ?~> "failed to parse missing bucket header" } yield (bytes, indices) - def getVoxelAtPosition(userToken: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - pos: Vec3Int, - mag: Vec3Int): Fox[Array[Byte]] = + def getVoxelAtPosition(remoteFallbackLayer: RemoteFallbackLayer, pos: Vec3Int, mag: Vec3Int)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) 
- result <- rpc(s"$remoteLayerUri/data") - .addQueryStringOptional("token", userToken) + result <- rpc(s"$remoteLayerUri/data").withTokenFromContext .addQueryString("x" -> pos.x.toString) .addQueryString("y" -> pos.y.toString) .addQueryString("z" -> pos.z.toString) @@ -81,33 +74,25 @@ class TSRemoteDatastoreClient @Inject()( def getAgglomerateIdsForSegmentIds(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String, - segmentIdsOrdered: List[Long], - userToken: Option[String]): Fox[List[Long]] = + segmentIdsOrdered: List[Long])(implicit tc: TokenContext): Fox[List[Long]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) segmentIdsOrderedProto = ListOfLong(items = segmentIdsOrdered) - result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/agglomeratesForSegments") - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/agglomeratesForSegments").withTokenFromContext.silent .postProtoWithProtoResponse[ListOfLong, ListOfLong](segmentIdsOrderedProto)(ListOfLong) } yield result.items.toList - def getAgglomerateGraph(remoteFallbackLayer: RemoteFallbackLayer, - baseMappingName: String, - agglomerateId: Long, - userToken: Option[String]): Fox[AgglomerateGraph] = + def getAgglomerateGraph(remoteFallbackLayer: RemoteFallbackLayer, baseMappingName: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[AgglomerateGraph] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/agglomerates/$baseMappingName/agglomerateGraph/$agglomerateId").silent - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/agglomerates/$baseMappingName/agglomerateGraph/$agglomerateId").silent.withTokenFromContext.silent .getWithProtoResponse[AgglomerateGraph](AgglomerateGraph) } yield result - def getLargestAgglomerateId(remoteFallbackLayer: RemoteFallbackLayer, - mappingName: String, - userToken: Option[String]): Fox[Long] = { - val cacheKey = (remoteFallbackLayer, mappingName, userToken) + def getLargestAgglomerateId(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String)( + implicit tc: TokenContext): Fox[Long] = { + val cacheKey = (remoteFallbackLayer, mappingName, tc.userTokenOpt) largestAgglomerateIdCache.getOrLoad( cacheKey, k => @@ -121,26 +106,20 @@ class TSRemoteDatastoreClient @Inject()( ) } - def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer, userToken: Option[String]): Fox[Boolean] = + def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Boolean] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex") - .addQueryStringOptional("token", userToken) - .silent - .getWithJsonResponse[Boolean] + hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex").withTokenFromContext.silent.getWithJsonResponse[Boolean] } yield hasIndexFile def querySegmentIndex(remoteFallbackLayer: RemoteFallbackLayer, segmentId: Long, mag: Vec3Int, mappingName: Option[String], // should be the baseMappingName in case of editable mappings - editableMappingTracingId: Option[String], - userToken: Option[String]): Fox[Seq[Vec3Int]] = + editableMappingTracingId: Option[String])(implicit tc: TokenContext): Fox[Seq[Vec3Int]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - positions <- rpc(s"$remoteLayerUri/segmentIndex/$segmentId") - .addQueryStringOptional("token", userToken) - .silent + positions <- 
rpc(s"$remoteLayerUri/segmentIndex/$segmentId").withTokenFromContext.silent .postJsonWithJsonResponse[GetSegmentIndexParameters, Seq[Vec3Int]](GetSegmentIndexParameters( mag, cubeSize = Vec3Int.ones, // Don't use the cubeSize parameter here (since we want to calculate indices later anyway) @@ -157,13 +136,10 @@ class TSRemoteDatastoreClient @Inject()( segmentIds: Seq[Long], mag: Vec3Int, mappingName: Option[String], // should be the baseMappingName in case of editable mappings - editableMappingTracingId: Option[String], - userToken: Option[String]): Fox[Seq[(Long, Seq[Vec3Int])]] = + editableMappingTracingId: Option[String])(implicit tc: TokenContext): Fox[Seq[(Long, Seq[Vec3Int])]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/segmentIndex") - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/segmentIndex").withTokenFromContext.silent .postJsonWithJsonResponse[GetMultipleSegmentIndexParameters, Seq[SegmentIndexData]]( GetMultipleSegmentIndexParameters(segmentIds.toList, mag, @@ -173,26 +149,23 @@ class TSRemoteDatastoreClient @Inject()( } yield result.map(data => (data.segmentId, data.positions)) - def loadFullMeshStl(token: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - fullMeshRequest: FullMeshRequest): Fox[Array[Byte]] = + def loadFullMeshStl(remoteFallbackLayer: RemoteFallbackLayer, fullMeshRequest: FullMeshRequest)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl") - .addQueryStringOptional("token", token) + result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl").withTokenFromContext .postJsonWithBytesResponse(fullMeshRequest) } yield result - def voxelSizeForTracingWithCache(tracingId: String, token: Option[String]): Fox[VoxelSize] = - voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId, token)) + def voxelSizeForTracingWithCache(tracingId: String)(implicit tc: TokenContext): Fox[VoxelSize] = + voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId)) - private def voxelSizeForTracing(tracingId: String, token: Option[String]): Fox[VoxelSize] = + private def voxelSizeForTracing(tracingId: String)(implicit tc: TokenContext): Fox[VoxelSize] = for { dataSourceId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) dataStoreUri <- dataStoreUriWithCache(dataSourceId.organizationId, dataSourceId.directoryName) result <- rpc( - s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.directoryName}/readInboxDataSource") - .addQueryStringOptional("token", token) + s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.directoryName}/readInboxDataSource").withTokenFromContext .getWithJsonResponse[InboxDataSource] scale <- result.voxelSizeOpt ?~> "could not determine voxel size of dataset" } yield scale diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 6e98968ace5..66f20ecc14c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -1,9 +1,14 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject +import 
com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ @@ -12,21 +17,23 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessAnswer, UserAccessRequest } +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle import play.api.libs.json.{JsObject, Json, OFormat} import play.api.libs.ws.WSResponse import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt -case class TracingUpdatesReport(tracingId: String, - timestamps: List[Instant], - statistics: Option[JsObject], - significantChangesCount: Int, - viewChangesCount: Int, - userToken: Option[String]) -object TracingUpdatesReport { - implicit val jsonFormat: OFormat[TracingUpdatesReport] = Json.format[TracingUpdatesReport] +case class AnnotationUpdatesReport(annotationId: String, + timestamps: List[Instant], + statistics: Option[JsObject], + significantChangesCount: Int, + viewChangesCount: Int, + userToken: Option[String]) +object AnnotationUpdatesReport { + implicit val jsonFormat: OFormat[AnnotationUpdatesReport] = Json.format[AnnotationUpdatesReport] } class TSRemoteWebknossosClient @Inject()( @@ -42,17 +49,21 @@ class TSRemoteWebknossosClient @Inject()( private val webknossosUri: String = config.Tracingstore.WebKnossos.uri private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() + private lazy val annotationIdByTracingIdCache: AlfuCache[String, String] = + AlfuCache(maxCapacity = 10000, timeToLive = 5 minutes) - def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = + def reportAnnotationUpdates(tracingUpdatesReport: AnnotationUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") .addQueryString("key" -> tracingStoreKey) .silent .post(Json.toJson(tracingUpdatesReport)) - def getDataSourceForTracing(tracingId: String): Fox[DataSourceLike] = + def getDataSourceForTracing(tracingId: String)(implicit tc: TokenContext): Fox[DataSourceLike] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSource") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) + .withTokenFromContext + .silent .getWithJsonResponse[DataSourceLike] def getDataStoreUriForDataSource(organizationId: String, datasetDirectoryName: String): Fox[String] = @@ -69,13 +80,51 @@ class TSRemoteWebknossosClient @Inject()( rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSourceId") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) + .silent .getWithJsonResponse[DataSourceId] ) - override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = + def getAnnotationIdForTracing(tracingId: String)(implicit ec: ExecutionContext): 
Fox[String] = + annotationIdByTracingIdCache.getOrLoad( + tracingId, + tracingId => + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/annotationId") + .addQueryString("tracingId" -> tracingId) + .addQueryString("key" -> tracingStoreKey) + .silent + .getWithJsonResponse[String] + ) ?~> "annotation.idForTracing.failed" + + def updateAnnotation(annotationId: String, annotationProto: AnnotationProto): Fox[Unit] = + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotation") + .addQueryString("annotationId" -> annotationId) + .addQueryString("key" -> tracingStoreKey) + .silent + .postProto(annotationProto) + + def createTracingFor(annotationId: String, + layerParameters: AnnotationLayerParameters, + previousVersion: Long): Fox[Either[SkeletonTracing, VolumeTracing]] = { + val req = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/createTracing") + .addQueryString("annotationId" -> annotationId) + .addQueryString("previousVersion" -> previousVersion.toString) // used for fetching old precedence layers + .addQueryString("key" -> tracingStoreKey) + layerParameters.typ match { + case AnnotationLayerType.Volume => + req + .postJsonWithProtoResponse[AnnotationLayerParameters, VolumeTracing](layerParameters)(VolumeTracing) + .map(Right(_)) + case AnnotationLayerType.Skeleton => + req + .postJsonWithProtoResponse[AnnotationLayerParameters, SkeletonTracing](layerParameters)(SkeletonTracing) + .map(Left(_)) + } + } + + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) - .addQueryStringOptional("token", token) + .withTokenFromContext .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index cd6fb91fc9d..e67aaddec71 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -1,14 +1,14 @@ package com.scalableminds.webknossos.tracingstore -import org.apache.pekko.actor.ActorSystem import com.google.inject.AbstractModule import com.google.inject.name.Names import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import com.scalableminds.webknossos.tracingstore.tracings.{TemporaryTracingService, TracingDataStore} +import org.apache.pekko.actor.ActorSystem class TracingStoreModule extends AbstractModule { @@ -17,7 +17,7 @@ class TracingStoreModule extends AbstractModule { override def configure(): Unit = { bind(classOf[ActorSystem]).annotatedWith(Names.named("webknossos-tracingstore")).toInstance(system) 
bind(classOf[TracingDataStore]).asEagerSingleton() - bind(classOf[SkeletonTracingService]).asEagerSingleton() + bind(classOf[TemporaryTracingService]).asEagerSingleton() bind(classOf[VolumeTracingService]).asEagerSingleton() bind(classOf[TracingStoreAccessTokenService]).asEagerSingleton() bind(classOf[TSRemoteWebknossosClient]).asEagerSingleton() @@ -25,5 +25,8 @@ class TracingStoreModule extends AbstractModule { bind(classOf[EditableMappingService]).asEagerSingleton() bind(classOf[TSSlackNotificationService]).asEagerSingleton() bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() + bind(classOf[AnnotationTransactionService]).asEagerSingleton() + bind(classOf[TSAnnotationService]).asEagerSingleton() } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala new file mode 100644 index 00000000000..6fd848fc7b7 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -0,0 +1,41 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService + +import scala.concurrent.ExecutionContext + +trait AnnotationReversion { + + def volumeTracingService: VolumeTracingService + + def revertDistributedElements(currentAnnotationWithTracings: AnnotationWithTracings, + sourceAnnotationWithTracings: AnnotationWithTracings, + sourceVersion: Long, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + for { + _ <- Fox.serialCombined(sourceAnnotationWithTracings.getVolumes) { + // Only volume data for volume layers present in the *source annotation* needs to be reverted. 
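+ // Layers that have an editable mapping keep their bucket data untouched; their mapping edits are rolled back via revertEditableMappingFields below.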
+ case (tracingId, sourceTracing) => + for { + tracingBeforeRevert <- currentAnnotationWithTracings.getVolume(tracingId).toFox + _ <- Fox.runIf(!sourceTracing.getHasEditableMapping)( + volumeTracingService + .revertVolumeData(tracingId, sourceVersion, sourceTracing, newVersion: Long, tracingBeforeRevert)) + _ <- Fox.runIf(sourceTracing.getHasEditableMapping)( + revertEditableMappingFields(currentAnnotationWithTracings, sourceVersion, tracingId)) + } yield () + } + } yield () + + private def revertEditableMappingFields(currentAnnotationWithTracings: AnnotationWithTracings, + sourceVersion: Long, + tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = + for { + updater <- currentAnnotationWithTracings.getEditableMappingUpdater(tracingId).toFox + _ <- updater.revertToVersion(sourceVersion) + _ <- updater.flushBuffersToFossil() + } yield () +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala new file mode 100644 index 00000000000..f8a6ac78a2b --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -0,0 +1,288 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Fox.bool2Fox +import com.scalableminds.util.tools.{Fox, JsonHelper} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + BucketMutatingVolumeUpdateAction, + UpdateBucketVolumeAction, + VolumeTracingService +} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, TracingId} +import com.scalableminds.webknossos.tracingstore.{ + AnnotationUpdatesReport, + TSRemoteWebknossosClient, + TracingStoreRedisStore +} +import com.typesafe.scalalogging.LazyLogging +import play.api.http.Status.CONFLICT +import play.api.libs.json.Json + +import javax.inject.Inject +import scala.concurrent.ExecutionContext +import scala.concurrent.duration._ + +class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRedisStore, + uncommittedUpdatesStore: TracingStoreRedisStore, + volumeTracingService: VolumeTracingService, + tracingDataStore: TracingDataStore, + remoteWebknossosClient: TSRemoteWebknossosClient, + annotationService: TSAnnotationService) + extends KeyValueStoreImplicits + with LazyLogging { + + private val transactionGroupExpiry: FiniteDuration = 24 hours + private val handledGroupCacheExpiry: FiniteDuration = 24 hours + + private def transactionGroupKey(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long) = + s"transactionGroup___${annotationId}___${transactionId}___${transactionGroupIndex}___$version" + + private def handledGroupKey(annotationId: String, transactionId: String, version: Long, transactionGroupIndex: Int) = + s"handledGroup___${annotationId}___${transactionId}___${version}___$transactionGroupIndex" + + private def patternFor(annotationId: String, transactionId: String) = + s"transactionGroup___${annotationId}___${transactionId}___*" + + private def saveUncommitted(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long, + updateGroup: UpdateActionGroup, + expiry: FiniteDuration)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- 
Fox.runIf(transactionGroupIndex > 0)( + Fox.assertTrue( + uncommittedUpdatesStore.contains(transactionGroupKey( + annotationId, + transactionId, + transactionGroupIndex - 1, + version))) ?~> s"Incorrect transaction index. Got: $transactionGroupIndex but ${transactionGroupIndex - 1} does not exist" ~> CONFLICT) + _ <- uncommittedUpdatesStore.insert( + transactionGroupKey(annotationId, transactionId, transactionGroupIndex, version), + Json.toJson(updateGroup).toString(), + Some(expiry)) + } yield () + + private def handleUpdateGroupForTransaction( + annotationId: String, + previousVersionFox: Fox[Long], + updateGroup: UpdateActionGroup)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = + for { + previousCommittedVersion: Long <- previousVersionFox + result <- if (previousCommittedVersion + 1 == updateGroup.version) { + if (updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { + // Received the last group of this transaction + commitWithPending(annotationId, updateGroup) + } else { + for { + _ <- saveUncommitted(annotationId, + updateGroup.transactionId, + updateGroup.transactionGroupIndex, + updateGroup.version, + updateGroup, + transactionGroupExpiry) + _ <- saveToHandledGroupIdStore(annotationId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex) + } yield previousCommittedVersion // no updates have been committed, do not yield version increase + } + } else { + failUnlessAlreadyHandled(updateGroup, annotationId, previousCommittedVersion) + } + } yield result + + // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction + // and commit them all. + private def commitWithPending(annotationId: String, updateGroup: UpdateActionGroup)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = + for { + previousActionGroupsToCommit <- getAllUncommittedFor(annotationId, updateGroup.transactionId) + _ <- bool2Fox( + previousActionGroupsToCommit + .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
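+ // Merging all groups of the transaction into a single group means the whole transaction is committed atomically under one version.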
+ concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) + commitResult <- commitUpdates(annotationId, List(concatenatedGroup)) + _ <- removeAllUncommittedFor(annotationId, updateGroup.transactionId) + } yield commitResult + + private def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = + uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) + + private def getAllUncommittedFor(annotationId: String, transactionId: String): Fox[List[UpdateActionGroup]] = + for { + raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(annotationId, transactionId)) + parsed: Seq[UpdateActionGroup] = raw.flatMap(itemAsString => + JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup])) + } yield parsed.toList.sortBy(_.transactionGroupIndex) + + private def saveToHandledGroupIdStore(annotationId: String, + transactionId: String, + version: Long, + transactionGroupIndex: Int): Fox[Unit] = { + val key = handledGroupKey(annotationId, transactionId, version, transactionGroupIndex) + handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) + } + + private def handledGroupIdStoreContains(annotationId: String, + transactionId: String, + version: Long, + transactionGroupIndex: Int): Fox[Boolean] = + handledGroupIdStore.contains(handledGroupKey(annotationId, transactionId, version, transactionGroupIndex)) + + private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup], + lastActionGroup: UpdateActionGroup): UpdateActionGroup = + if (previousActionGroups.isEmpty) lastActionGroup + else { + val allActionGroups = previousActionGroups :+ lastActionGroup + UpdateActionGroup( + version = lastActionGroup.version, + timestamp = lastActionGroup.timestamp, + authorId = lastActionGroup.authorId, + actions = allActionGroups.flatMap(_.actions), + stats = lastActionGroup.stats, // the latest stats do count + info = lastActionGroup.info, // frontend sets this identically for all groups of transaction + transactionId = f"${lastActionGroup.transactionId}-concatenated", + transactionGroupCount = 1, + transactionGroupIndex = 0, + ) + } + + def handleSingleUpdateAction(annotationId: String, currentVersion: Long, updateAction: UpdateAction)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = { + val wrapped = List( + UpdateActionGroup( + currentVersion + 1, + System.currentTimeMillis(), + None, + List(updateAction), + None, + None, + "dummyTransactionId", + 1, + 0 + )) + handleUpdateGroups(annotationId, wrapped) + } + + def handleUpdateGroups(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = + if (updateGroups.forall(_.transactionGroupCount == 1)) { + commitUpdates(annotationId, updateGroups) + } else { + updateGroups.foldLeft(annotationService.currentMaterializableVersion(annotationId)) { + (currentCommittedVersionFox, updateGroup) => + handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup) + } + } + + // Perform version check and commit the passed updates + private def commitUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = + for { + _ <- reportUpdates(annotationId, updateGroups) + currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) + newVersion <- updateGroups.foldLeft(currentCommittedVersion) { 
(previousVersion, updateGroup) => + previousVersion.flatMap { prevVersion: Long => + if (prevVersion + 1 == updateGroup.version) { + for { + _ <- handleUpdateGroup(annotationId, updateGroup) + _ <- saveToHandledGroupIdStore(annotationId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex) + } yield updateGroup.version + } else failUnlessAlreadyHandled(updateGroup, annotationId, prevVersion) + } + } + _ <- applyImmediatelyIfNeeded(annotationId, updateGroups.flatMap(_.actions), newVersion) + } yield newVersion + + private def applyImmediatelyIfNeeded(annotationId: String, updates: List[UpdateAction], newVersion: Long)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Unit] = + if (containsApplyImmediatelyUpdateActions(updates)) { + annotationService.get(annotationId, Some(newVersion)).map(_ => ()) + } else Fox.successful(()) + + private def containsApplyImmediatelyUpdateActions(updates: List[UpdateAction]) = updates.exists { + case _: ApplyImmediatelyUpdateAction => true + case _ => false + } + + private def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Unit] = + for { + updateActionsJson <- Fox.successful(Json.toJson(preprocessActionsForStorage(updateActionGroup))) + _ <- tracingDataStore.annotationUpdates.put(annotationId, updateActionGroup.version, updateActionsJson) + bucketMutatingActions = findBucketMutatingActions(updateActionGroup) + actionsGrouped: Map[String, List[BucketMutatingVolumeUpdateAction]] = bucketMutatingActions.groupBy( + _.actionTracingId) + _ <- Fox.serialCombined(actionsGrouped.keys.toList) { volumeTracingId => + for { + tracing <- annotationService.findVolume(annotationId, volumeTracingId) + _ <- volumeTracingService.applyBucketMutatingActions(volumeTracingId, + tracing, + bucketMutatingActions, + updateActionGroup.version) + } yield () + } + } yield () + + private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = + updateActionGroup.actions.flatMap { + case a: BucketMutatingVolumeUpdateAction => Some(a) + case _ => None + } + + private def preprocessActionsForStorage(updateActionGroup: UpdateActionGroup): List[UpdateAction] = { + val actionsWithInfo = updateActionGroup.actions.map( + _.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { + case Nil => List[UpdateAction]() + //to the first action in the group, attach the group's info + case first :: rest => first.addInfo(updateActionGroup.info) :: rest + } + actionsWithInfo.map { + case a: UpdateBucketVolumeAction => a.withoutBase64Data + case a: AddLayerAnnotationAction => a.copy(tracingId = Some(TracingId.generate)) + case a => a + } + } + + /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), + * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful + * despite the backend receiving it just fine. + */ + private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, tracingId: String, previousVersion: Long)( + implicit ec: ExecutionContext): Fox[Long] = { + val errorMessage = s"Incorrect version. 
Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" + for { + _ <- Fox.assertTrue( + handledGroupIdStoreContains(tracingId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT + } yield updateGroup.version + } + + private def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])( + implicit tc: TokenContext): Fox[Unit] = + for { + _ <- remoteWebknossosClient.reportAnnotationUpdates( + AnnotationUpdatesReport( + annotationId, + timestamps = updateGroups.map(g => Instant(g.timestamp)), + statistics = updateGroups.flatMap(_.stats).lastOption, + significantChangesCount = updateGroups.map(_.significantChangesCount).sum, + viewChangesCount = updateGroups.map(_.viewChangesCount).sum, + tc.userTokenOpt + )) + } yield () + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala new file mode 100644 index 00000000000..c0ab31de00d --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -0,0 +1,147 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType +import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions +import play.api.libs.json.Json.WithDefaultValues +import play.api.libs.json.{Json, OFormat} + +case class AnnotationLayerParameters(typ: AnnotationLayerType, + fallbackLayerName: Option[String], + autoFallbackLayer: Boolean = false, + mappingName: Option[String] = None, + magRestrictions: Option[MagRestrictions], + name: Option[String], + additionalAxes: Option[Seq[AdditionalAxis]]) { + def getNameWithDefault: String = name.getOrElse(AnnotationLayer.defaultNameForType(typ)) +} +object AnnotationLayerParameters { + implicit val jsonFormat: OFormat[AnnotationLayerParameters] = + Json.using[WithDefaultValues].format[AnnotationLayerParameters] +} + +trait AnnotationUpdateAction extends UpdateAction + +case class AddLayerAnnotationAction(layerParameters: AnnotationLayerParameters, + tracingId: Option[String] = None, // filled in by backend eagerly on save + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class DeleteLayerAnnotationAction(tracingId: String, + layerName: String, // Just stored for nicer-looking history + typ: AnnotationLayerType, // Just stored for nicer-looking history + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): 
UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateLayerMetadataAnnotationAction(tracingId: String, + layerName: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateMetadataAnnotationAction( + description: Option[String], // None means do not change description. An empty string clears it. + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class RevertToVersionAnnotationAction(sourceVersion: Long, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +// Used only in tasks, by admins, to undo the work done by the annotator +case class ResetToBaseAnnotationAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateTdCameraAnnotationAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction { + + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) + + override def isViewOnlyChange: Boolean = true +} + +object AddLayerAnnotationAction { + implicit val jsonFormat: OFormat[AddLayerAnnotationAction] = Json.format[AddLayerAnnotationAction] +} +object DeleteLayerAnnotationAction { + implicit val jsonFormat: OFormat[DeleteLayerAnnotationAction] = Json.format[DeleteLayerAnnotationAction] +} +object UpdateLayerMetadataAnnotationAction { + implicit val jsonFormat: OFormat[UpdateLayerMetadataAnnotationAction] = + Json.format[UpdateLayerMetadataAnnotationAction] +} +object
UpdateMetadataAnnotationAction { + implicit val jsonFormat: OFormat[UpdateMetadataAnnotationAction] = + Json.format[UpdateMetadataAnnotationAction] +} +object RevertToVersionAnnotationAction { + implicit val jsonFormat: OFormat[RevertToVersionAnnotationAction] = + Json.format[RevertToVersionAnnotationAction] +} +object ResetToBaseAnnotationAction { + implicit val jsonFormat: OFormat[ResetToBaseAnnotationAction] = + Json.format[ResetToBaseAnnotationAction] +} +object UpdateTdCameraAnnotationAction { + implicit val jsonFormat: OFormat[UpdateTdCameraAnnotationAction] = Json.format[UpdateTdCameraAnnotationAction] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala new file mode 100644 index 00000000000..8d99c5de3a0 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -0,0 +1,173 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingUpdateAction, + EditableMappingUpdater +} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction +import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction +import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.{Box, Failure, Full} + +import scala.concurrent.ExecutionContext + +case class AnnotationWithTracings( + annotation: AnnotationProto, + tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]], + editableMappingsByTracingId: Map[String, (EditableMappingInfo, EditableMappingUpdater)]) + extends LazyLogging { + + def getSkeleton(tracingId: String): Box[SkeletonTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + skeletonTracing <- tracingEither match { + case Left(st: SkeletonTracing) => Full(st) + case _ => Failure(f"Tried to access tracing $tracingId as skeleton, but is volume") + } + } yield skeletonTracing + + def getVolumes: List[(String, VolumeTracing)] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) => Some(id, vt) + case _ => None + }.toList + + def getSkeletons: List[(String, SkeletonTracing)] = + tracingsById.view.flatMap { + case (id, Left(st: SkeletonTracing)) => Some(id, st) + case _ => None + }.toList + + // Assumes that there is at most one skeleton layer per annotation. 
This is true as of this writing + def getSkeletonId: Option[String] = + getSkeletons.headOption.map(_._1) + + def getEditableMappingTracingIds: List[String] = editableMappingsByTracingId.keys.toList + + def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = + editableMappingsByTracingId.view.flatMap { + case (id, (info: EditableMappingInfo, _)) => Some(id, info) + case _ => None + }.toList + + def getVolume(tracingId: String): Box[VolumeTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + volumeTracing <- tracingEither match { + case Right(vt: VolumeTracing) => Full(vt) + case _ => Failure(f"Tried to access tracing $tracingId as volume, but is skeleton") + } + } yield volumeTracing + + def volumesThatHaveEditableMapping: List[(VolumeTracing, String)] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) if vt.getHasEditableMapping => Some((vt, id)) + case _ => None + }.toList + + def getEditableMappingInfo(tracingId: String): Box[EditableMappingInfo] = + for { + (info, _) <- editableMappingsByTracingId.get(tracingId) + } yield info + + def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = + for { + (_, updater) <- editableMappingsByTracingId.get(tracingId) + } yield updater + + def version: Long = annotation.version + + def addLayer(a: AddLayerAnnotationAction, + tracingId: String, + tracing: Either[SkeletonTracing, VolumeTracing]): AnnotationWithTracings = + this.copy( + annotation = annotation.copy( + annotationLayers = annotation.annotationLayers :+ AnnotationLayerProto( + tracingId, + a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), + typ = AnnotationLayerType.toProto(a.layerParameters.typ) + )), + tracingsById = tracingsById.updated(tracingId, tracing) + ) + + def deleteLayer(a: DeleteLayerAnnotationAction): AnnotationWithTracings = + this.copy( + annotation = annotation.copy(annotationLayers = annotation.annotationLayers.filter(_.tracingId != a.tracingId)), + tracingsById = tracingsById.removed(a.tracingId) + ) + + def updateLayerMetadata(a: UpdateLayerMetadataAnnotationAction): AnnotationWithTracings = + this.copy(annotation = annotation.copy(annotationLayers = annotation.annotationLayers.map(l => + if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) + + def updateMetadata(a: UpdateMetadataAnnotationAction): AnnotationWithTracings = + a.description.map { newDescription => + this.copy(annotation = annotation.copy(description = newDescription)) + }.getOrElse(this) + + def withVersion(newVersion: Long): AnnotationWithTracings = { + val tracingsUpdated = tracingsById.view.mapValues { + case Left(t: SkeletonTracing) => Left(t.withVersion(newVersion)) + case Right(t: VolumeTracing) => Right(t.withVersion(newVersion)) + } + this.copy( + annotation = annotation.copy(version = newVersion, + skeletonMayHavePendingUpdates = None, + editableMappingsMayHavePendingUpdates = None), + tracingsById = tracingsUpdated.toMap + ) + } + + def withNewUpdaters(materializedVersion: Long, targetVersion: Long): AnnotationWithTracings = { + val editableMappingsUpdated = editableMappingsByTracingId.view.mapValues { + case (mapping, updater) => (mapping, updater.newWithTargetVersion(materializedVersion, targetVersion)) + } + this.copy(editableMappingsByTracingId = editableMappingsUpdated.toMap) + } + + def addEditableMapping(volumeTracingId: String, + editableMappingInfo: EditableMappingInfo, + updater: EditableMappingUpdater): AnnotationWithTracings = + 
this.copy(editableMappingsByTracingId = + editableMappingsByTracingId.updated(volumeTracingId, (editableMappingInfo, updater))) + + def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + skeletonTracing <- getSkeleton(a.actionTracingId) + updated = a.applyOn(skeletonTracing) + } yield this.copy(tracingsById = tracingsById.updated(a.actionTracingId, Left(updated))) + + def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + volumeTracing <- getVolume(a.actionTracingId) + updated = a.applyOn(volumeTracing) + } yield + AnnotationWithTracings(annotation, + tracingsById.updated(a.actionTracingId, Right(updated)), + editableMappingsByTracingId) + + def applyEditableMappingAction(a: EditableMappingUpdateAction)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + updater: EditableMappingUpdater <- getEditableMappingUpdater(a.actionTracingId).toFox + info <- getEditableMappingInfo(a.actionTracingId).toFox + updated <- updater.applyOneUpdate(info, a) + } yield + this.copy( + editableMappingsByTracingId = editableMappingsByTracingId.updated(a.actionTracingId, (updated, updater))) + + def flushBufferedUpdates()(implicit ec: ExecutionContext): Fox[Unit] = { + val updaters = editableMappingsByTracingId.values.map(_._2).toList + for { + _ <- Fox.serialCombined(updaters)(updater => updater.flushBuffersToFossil()) + } yield () + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala new file mode 100644 index 00000000000..08586ac51dc --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -0,0 +1,967 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import collections.SequenceUtils +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.cache.AlfuCache +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} +import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.tracingstore.tracings._ +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingLayer, + EditableMappingService, + EditableMappingUpdateAction, + EditableMappingUpdater +} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ + CreateNodeSkeletonAction, + DeleteNodeSkeletonAction, + SkeletonUpdateAction, + UpdateTracingSkeletonAction +} +import com.scalableminds.webknossos.tracingstore.tracings.volume._ +import 
com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} +import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.{Empty, Full} +import play.api.libs.json.{JsObject, JsValue, Json} + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknossosClient, + editableMappingService: EditableMappingService, + val volumeTracingService: VolumeTracingService, + skeletonTracingService: SkeletonTracingService, + skeletonTracingMigrationService: SkeletonTracingMigrationService, + volumeTracingMigrationService: VolumeTracingMigrationService, + temporaryTracingService: TemporaryTracingService, + val remoteDatastoreClient: TSRemoteDatastoreClient, + tracingDataStore: TracingDataStore) + extends KeyValueStoreImplicits + with FallbackDataHelper + with ProtoGeometryImplicits + with AnnotationReversion + with UpdateGroupHandling + with LazyLogging { + + // two-level caching: outer key: annotation id; inner key version + // This way we cache at most two versions of the same annotation, and at most 1000 different annotations + private lazy val materializedAnnotationWithTracingCache = + AlfuCache[String, AlfuCache[Long, AnnotationWithTracings]](maxCapacity = 1000) + + private def newInnerCache(implicit ec: ExecutionContext): Fox[AlfuCache[Long, AnnotationWithTracings]] = + Fox.successful(AlfuCache[Long, AnnotationWithTracings](maxCapacity = 2)) + + def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationProto] = + for { + isTemporaryAnnotation <- temporaryTracingService.isTemporaryAnnotation(annotationId) + annotation <- if (isTemporaryAnnotation) temporaryTracingService.getAnnotation(annotationId) + else + for { + withTracings <- getWithTracings(annotationId, version) ?~> "annotation.notFound" + } yield withTracings.annotation + } yield annotation + + def getMultiple(annotationIds: Seq[String])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Seq[AnnotationProto]] = + Fox.serialCombined(annotationIds) { annotationId => + get(annotationId, None) + } + + private def getWithTracings(annotationId: String, version: Option[Long])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = + for { + // First, fetch the very newest materialized (even if an older one was requested), to determine correct targetVersion + newestMaterialized <- getNewestMatchingMaterializedAnnotation(annotationId, version = None) ?~> "getNewestMaterialized.failed" + targetVersion <- determineTargetVersion(annotationId, newestMaterialized, version) ?~> "determineTargetVersion.failed" + // When requesting any other than the newest version, do not consider the changes final + reportChangesToWk = version.isEmpty || version.contains(targetVersion) + materializedAnnotationInnerCache <- materializedAnnotationWithTracingCache.getOrLoad(annotationId, + _ => newInnerCache) + updatedAnnotation <- materializedAnnotationInnerCache.getOrLoad( + targetVersion, + _ => getWithTracingsVersioned(annotationId, targetVersion, reportChangesToWk = reportChangesToWk) + ) + } yield updatedAnnotation + + private def getWithTracingsVersioned(annotationId: String, targetVersion: Long, reportChangesToWk: Boolean)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = + for { + materializedAnnotation <- getNewestMatchingMaterializedAnnotation(annotationId, Some(targetVersion)) + annotationWithTracings 
<- findTracingsForAnnotation(materializedAnnotation) ?~> "findTracingsForAnnotation.failed" + annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( + annotationId, + annotationWithTracings, + materializedAnnotation.version, + targetVersion // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters + ) ?~> "findEditableMappingsForAnnotation.failed" + updated <- applyPendingUpdates(annotationWithTracingsAndMappings, annotationId, targetVersion, reportChangesToWk) ?~> "applyUpdates.failed" + } yield updated + + def currentMaterializableVersion(annotationId: String): Fox[Long] = + tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + + def currentMaterializedVersion(annotationId: String): Fox[Long] = + tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + + private def newestMatchingMaterializedSkeletonVersion(tracingId: String, targetVersion: Long): Fox[Long] = + tracingDataStore.skeletons.getVersion(tracingId, + version = Some(targetVersion), + mayBeEmpty = Some(true), + emptyFallback = Some(0L)) + + private def newestMatchingMaterializedEditableMappingVersion(tracingId: String, targetVersion: Long): Fox[Long] = + tracingDataStore.editableMappingsInfo.getVersion(tracingId, + version = Some(targetVersion), + mayBeEmpty = Some(true), + emptyFallback = Some(0L)) + + private def getNewestMatchingMaterializedAnnotation(annotationId: String, + version: Option[Long]): Fox[AnnotationProto] = + for { + keyValuePair <- tracingDataStore.annotations.get[AnnotationProto]( + annotationId, + mayBeEmpty = Some(true), + version = version)(fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + } yield keyValuePair.value + + private def applyUpdate( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + updateAction: UpdateAction, + targetVersion: Long // Note: this is not the target version of this one update, but of all pending + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + for { + updated <- updateAction match { + case a: AddLayerAnnotationAction => + addLayer(annotationId, annotationWithTracings, a, targetVersion) + case a: DeleteLayerAnnotationAction => + Fox.successful(annotationWithTracings.deleteLayer(a)) + case a: UpdateLayerMetadataAnnotationAction => + Fox.successful(annotationWithTracings.updateLayerMetadata(a)) + case a: UpdateMetadataAnnotationAction => + Fox.successful(annotationWithTracings.updateMetadata(a)) + case a: SkeletonUpdateAction => + annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" + case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => + for { + withNewEditableMapping <- addEditableMapping(annotationId, annotationWithTracings, a, targetVersion) + withAppliedVolumeAction <- withNewEditableMapping.applyVolumeAction(a) + } yield withAppliedVolumeAction + case a: ApplyableVolumeUpdateAction => + annotationWithTracings.applyVolumeAction(a) + case a: EditableMappingUpdateAction => + annotationWithTracings.applyEditableMappingAction(a) + case a: RevertToVersionAnnotationAction => + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) + case _: ResetToBaseAnnotationAction => + resetToBase(annotationId, annotationWithTracings, targetVersion) + case _: BucketMutatingVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as bucket-mutating
actions are performed eagerly, so not here. + case _: CompactVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as legacy compacted update actions cannot be applied + case _: UpdateTdCameraAnnotationAction => + Fox.successful(annotationWithTracings) // No-op, exists just to mark these updates in the history / count times + case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") + } + } yield updated + + private def addLayer(annotationId: String, + annotationWithTracings: AnnotationWithTracings, + action: AddLayerAnnotationAction, + targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + tracingId <- action.tracingId.toFox ?~> "add layer action has no tracingId" + _ <- bool2Fox( + !annotationWithTracings.annotation.annotationLayers + .exists(_.name == action.layerParameters.getNameWithDefault)) ?~> "addLayer.nameInUse" + _ <- bool2Fox( + !annotationWithTracings.annotation.annotationLayers.exists( + _.typ == AnnotationLayerTypeProto.Skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" + tracing <- remoteWebknossosClient.createTracingFor(annotationId, + action.layerParameters, + previousVersion = targetVersion - 1) + updated = annotationWithTracings.addLayer(action, tracingId, tracing) + } yield updated + + private def revertToVersion( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionAnnotationAction, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + // Note: works only if revert actions are in separate update groups + for { + _ <- bool2Fox(revertAction.sourceVersion >= annotationWithTracings.annotation.earliestAccessibleVersion) ?~> f"Trying to revert to ${revertAction.sourceVersion}, but earliest accessible is ${annotationWithTracings.annotation.earliestAccessibleVersion}" + before = Instant.now + sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(revertAction.sourceVersion)) + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, revertAction.sourceVersion, newVersion) + _ = Instant.logSince( + before, + s"Reverting annotation $annotationId from v${annotationWithTracings.version} to v${revertAction.sourceVersion}") + } yield sourceAnnotation + + private def resetToBase(annotationId: String, annotationWithTracings: AnnotationWithTracings, newVersion: Long)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = { + // Note: works only if reset actions are in separate update groups + val sourceVersion = 0L // Tasks are currently always created as v0 + val before = Instant.now + for { + sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(sourceVersion)) + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, sourceVersion, newVersion) + _ = Instant.logSince(before, s"Resetting annotation $annotationId to base (v$sourceVersion)") + } yield sourceAnnotation + } + + def saveAnnotationProto(annotationId: String, + version: Long, + annotationProto: AnnotationProto, + toTemporaryStore: Boolean = false): Fox[Unit] = + if (toTemporaryStore) + temporaryTracingService.saveAnnotationProto(annotationId, annotationProto) + else + tracingDataStore.annotations.put(annotationId, version, annotationProto) + + def updateActionLog(annotationId: String, newestVersion: Long, oldestVersion: Long)( + implicit ec:
ExecutionContext): Fox[JsValue] = { + def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = + Json.obj( + "version" -> tuple._1, + "value" -> Json.toJson(tuple._2) + ) + + val batchRanges = SequenceUtils.batchRangeInclusive(oldestVersion, newestVersion, batchSize = 1000).reverse + for { + updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => + val batchFrom = batchRange._1 + val batchTo = batchRange._2 + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(batchTo), + Some(batchFrom))(fromJsonBytes[List[UpdateAction]]) + } + } yield Json.toJson(updateActionBatches.flatten.map(versionedTupleToJson)) + } + + def findEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[EditableMappingInfo] = + for { + annotation <- getWithTracings(annotationId, version) ?~> "getWithTracings.failed" + tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" + } yield tracing + + private def addEditableMapping( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + action: UpdateMappingNameVolumeAction, + targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + volumeTracing <- annotationWithTracings.getVolume(action.actionTracingId).toFox + _ <- assertMappingIsNotLocked(volumeTracing) + baseMappingName <- volumeTracing.mappingName.toFox ?~> "makeEditable.failed.noBaseMapping" + _ <- bool2Fox(volumeTracingService.volumeBucketsAreEmpty(action.actionTracingId)) ?~> "annotation.volumeBucketsNotEmpty" + editableMappingInfo = editableMappingService.create(baseMappingName) + updater <- editableMappingUpdaterFor(annotationId, + action.actionTracingId, + volumeTracing, + editableMappingInfo, + annotationWithTracings.version, + targetVersion) + } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo, updater) + + private def assertMappingIsNotLocked(volumeTracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = + bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" + + private def applyPendingUpdates( + annotationWithTracingsAndMappings: AnnotationWithTracings, + annotationId: String, + targetVersion: Long, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + for { + updateGroupsAsSaved <- findPendingUpdates(annotationId, annotationWithTracingsAndMappings, targetVersion) ?~> "findPendingUpdates.failed" + updatesGroupsRegrouped = regroupByIsolationSensitiveActions(updateGroupsAsSaved) + updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, + annotationId, + updatesGroupsRegrouped, + reportChangesToWk) ?~> "applyUpdates.inner.failed" + } yield + updated.withVersion(targetVersion) // set version again, because extraSkeleton update filtering may skip latest version + + private def findPendingUpdates(annotationId: String, annotation: AnnotationWithTracings, desiredVersion: Long)( + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + for { + extraSkeletonUpdates <- findExtraSkeletonUpdates(annotationId, annotation, desiredVersion) + extraEditableMappingUpdates <- findExtraEditableMappingUpdates(annotationId, annotation, desiredVersion) + existingVersion = annotation.version + pendingAnnotationUpdates <- if (desiredVersion == existingVersion) 
Fox.successful(List.empty) + else { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(desiredVersion), + Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } + } yield extraSkeletonUpdates ++ extraEditableMappingUpdates ++ pendingAnnotationUpdates + + /* + * The migration of https://github.com/scalableminds/webknossos/pull/7917 does not guarantee that the skeleton layer + * is materialized at the same version as the annotation. So even if we have an existing annotation version, + * we may fetch skeleton updates *older* than it, in order to fully construct the state of that version. + * Only annotations from before that migration have this skeletonMayHavePendingUpdates=Some(true). + */ + private def findExtraSkeletonUpdates(annotationId: String, annotation: AnnotationWithTracings, targetVersion: Long)( + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + if (annotation.annotation.skeletonMayHavePendingUpdates.getOrElse(false)) { + annotation.getSkeletonId.map { skeletonId => + for { + materializedSkeletonVersion <- newestMatchingMaterializedSkeletonVersion(skeletonId, targetVersion) + extraUpdates <- if (materializedSkeletonVersion < annotation.version) { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(annotation.version), + Some(materializedSkeletonVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } else Fox.successful(List.empty) + extraSkeletonUpdates = filterSkeletonUpdates(extraUpdates) + } yield extraSkeletonUpdates + }.getOrElse(Fox.successful(List.empty)) + } else Fox.successful(List.empty) + + private def filterSkeletonUpdates( + updateGroups: List[(Long, List[UpdateAction])]): List[(Long, List[SkeletonUpdateAction])] = + updateGroups.flatMap { + case (version, updateGroup) => + val updateGroupFiltered = updateGroup.flatMap { + case a: SkeletonUpdateAction => Some(a) + case _ => None + } + if (updateGroupFiltered.nonEmpty) { + Some((version, updateGroupFiltered)) + } else None + } + + // Same problem as with skeletons, see comment above + // Note that the EditableMappingUpdaters are passed only the “oldVersion”, that is, the materialized annotation version, + // not the actual materialized editableMapping version, but that should yield the same data when loading from fossil.
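+ // (A versioned FossilDB get returns the newest entry at or below the requested version, so both version bounds resolve to the same stored value.)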
+ private def findExtraEditableMappingUpdates( + annotationId: String, + annotation: AnnotationWithTracings, + targetVersion: Long)(implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + if (annotation.annotation.editableMappingsMayHavePendingUpdates.getOrElse(false)) { + for { + updatesByEditableMapping <- Fox.serialCombined(annotation.getEditableMappingTracingIds) { tracingId => + for { + materializedEditableMappingVersion <- newestMatchingMaterializedEditableMappingVersion(tracingId, + targetVersion) + extraUpdates <- if (materializedEditableMappingVersion < annotation.version) { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(annotation.version), + Some(materializedEditableMappingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } else Fox.successful(List.empty) + extraUpdatesForThisMapping = filterEditableMappingUpdates(extraUpdates, tracingId) + } yield extraUpdatesForThisMapping + } + } yield updatesByEditableMapping.flatten + } else Fox.successful(List.empty) + + private def filterEditableMappingUpdates(updateGroups: List[(Long, List[UpdateAction])], + tracingId: String): List[(Long, List[EditableMappingUpdateAction])] = + updateGroups.map { + case (version, updateGroup) => + val updateGroupFiltered = updateGroup.flatMap { + case a: EditableMappingUpdateAction if a.actionTracingId == tracingId => Some(a) + case _ => None + } + (version, updateGroupFiltered) + } + + private def findTracingsForAnnotation(annotation: AnnotationProto)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + val skeletonTracingIds = + annotation.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) + val volumeTracingIds = + annotation.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Volume).map(_.tracingId) + for { + skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)(id => + findSkeletonRaw(id, Some(annotation.version))) ?~> "findSkeletonRaw.failed" + volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)(id => findVolumeRaw(id, Some(annotation.version))) ?~> "findVolumeRaw.failed" + skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds + .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds + .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) + } + + private def findEditableMappingsForAnnotation( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { + val volumeWithEditableMapping = annotationWithTracings.volumesThatHaveEditableMapping + for { + idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { + case (volumeTracing, volumeTracingId) => + for { + editableMappingInfo <- getEditableMappingInfoRaw(volumeTracingId, annotationWithTracings.version) ?~> "getEditableMappingInfoRaw.failed" + updater <- editableMappingUpdaterFor(annotationId, + volumeTracingId, + volumeTracing, + editableMappingInfo.value, + currentMaterializedVersion, + targetVersion) ?~> "EditableMappingUpdater.initialize.failed" + } yield (editableMappingInfo.key, (editableMappingInfo.value, updater)) + } + } yield
annotationWithTracings.copy(editableMappingsByTracingId = idInfoUpdaterTuples.toMap)
+  }
+
+  private def getEditableMappingInfoRaw(volumeTracingId: String,
+                                        version: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] =
+    tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(version))(
+      fromProtoBytes[EditableMappingInfo])
+
+  private def editableMappingUpdaterFor(annotationId: String,
+                                        tracingId: String,
+                                        remoteFallbackLayer: RemoteFallbackLayer,
+                                        editableMappingInfo: EditableMappingInfo,
+                                        currentMaterializedVersion: Long,
+                                        targetVersion: Long)(implicit tc: TokenContext): EditableMappingUpdater =
+    new EditableMappingUpdater(
+      annotationId,
+      tracingId,
+      editableMappingInfo.baseMappingName,
+      currentMaterializedVersion,
+      targetVersion,
+      remoteFallbackLayer,
+      tc,
+      remoteDatastoreClient,
+      editableMappingService,
+      this,
+      tracingDataStore
+    )
+
+  private def editableMappingUpdaterFor(
+      annotationId: String,
+      tracingId: String,
+      volumeTracing: VolumeTracing,
+      editableMappingInfo: EditableMappingInfo,
+      currentMaterializedVersion: Long,
+      targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[EditableMappingUpdater] =
+    for {
+      remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(volumeTracing, tracingId)
+    } yield
+      editableMappingUpdaterFor(annotationId,
+                                tracingId,
+                                remoteFallbackLayer,
+                                editableMappingInfo,
+                                currentMaterializedVersion,
+                                targetVersion)
+
+  private def applyUpdatesGrouped(
+      annotation: AnnotationWithTracings,
+      annotationId: String,
+      updateGroups: List[(Long, List[UpdateAction])],
+      reportChangesToWk: Boolean
+  )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = {
+    def updateGroupedIter(annotationWithTracingsFox: Fox[AnnotationWithTracings],
+                          remainingUpdateGroups: List[(Long, List[UpdateAction])]): Fox[AnnotationWithTracings] =
+      annotationWithTracingsFox.futureBox.flatMap {
+        case Empty => Fox.empty
+        case Full(annotationWithTracings) =>
+          remainingUpdateGroups match {
+            case List() => Fox.successful(annotationWithTracings)
+            case updateGroup :: tail =>
+              updateGroupedIter(
+                applyUpdates(annotationWithTracings, annotationId, updateGroup._2, updateGroup._1, reportChangesToWk),
+                tail)
+          }
+        case _ => annotationWithTracingsFox
+      }
+
+    updateGroupedIter(Some(annotation), updateGroups)
+  }
+
+  private def applyUpdates(
+      annotationWithTracings: AnnotationWithTracings,
+      annotationId: String,
+      updates: List[UpdateAction],
+      targetVersion: Long,
+      reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = {
+
+    def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings],
+                   remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] =
+      annotationWithTracingsFox.futureBox.flatMap {
+        case Empty => Fox.empty
+        case Full(annotationWithTracings) =>
+          remainingUpdates match {
+            case List() => Fox.successful(annotationWithTracings)
+            case update :: tail =>
+              updateIter(applyUpdate(annotationId, annotationWithTracings, update, targetVersion), tail)
+          }
+        case _ => annotationWithTracingsFox
+      }
+
+    if (updates.isEmpty) Full(annotationWithTracings)
+    else {
+      for {
+        updated <- updateIter(
+          Some(annotationWithTracings.withNewUpdaters(annotationWithTracings.version, targetVersion)),
+          updates)
+        updatedWithNewVersion = updated.withVersion(targetVersion)
+        _ <- updatedWithNewVersion.flushBufferedUpdates()
+        _ <- flushUpdatedTracings(updatedWithNewVersion, updates)
+        _ <- flushAnnotationInfo(annotationId, updatedWithNewVersion)
+        _ <- Fox.runIf(reportChangesToWk && annotationWithTracings.annotation != updated.annotation)(
+          remoteWebknossosClient.updateAnnotation(annotationId, updatedWithNewVersion.annotation))
+      } yield updatedWithNewVersion
+    }
+  }
+
+  private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings, updates: List[UpdateAction])(
+      implicit ec: ExecutionContext) = {
+    // Flush updated tracing objects, but only if they were updated.
+    // If they weren’t updated, the older versions that will automatically be fetched are guaranteed to be identical.
+    val allMayHaveUpdates = updates.exists { update: UpdateAction =>
+      update match {
+        case _: RevertToVersionAnnotationAction => true
+        case _: ResetToBaseAnnotationAction => true
+        case _ => false
+      }
+    }
+    val tracingIdsWithUpdates: Set[String] = updates.flatMap {
+      case a: LayerUpdateAction => Some(a.actionTracingId)
+      case a: AddLayerAnnotationAction => a.tracingId // tracingId is an option, but filled on save. Drop Nones
+      case _ => None
+    }.toSet
+    for {
+      _ <- Fox.serialCombined(annotationWithTracings.getVolumes) {
+        case (volumeTracingId, volumeTracing) if allMayHaveUpdates || tracingIdsWithUpdates.contains(volumeTracingId) =>
+          tracingDataStore.volumes.put(volumeTracingId, volumeTracing.version, volumeTracing)
+        case _ => Fox.successful(())
+      }
+      _ <- Fox.serialCombined(annotationWithTracings.getSkeletons) {
+        case (skeletonTracingId, skeletonTracing: SkeletonTracing)
+            if allMayHaveUpdates || tracingIdsWithUpdates.contains(skeletonTracingId) =>
+          tracingDataStore.skeletons.put(skeletonTracingId, skeletonTracing.version, skeletonTracing)
+        case _ => Fox.successful(())
+      }
+      _ <- Fox.serialCombined(annotationWithTracings.getEditableMappingsInfo) {
+        case (volumeTracingId, editableMappingInfo)
+            if allMayHaveUpdates || tracingIdsWithUpdates.contains(volumeTracingId) =>
+          tracingDataStore.editableMappingsInfo.put(volumeTracingId,
+                                                    annotationWithTracings.version,
+                                                    editableMappingInfo)
+        case _ => Fox.successful(())
+      }
+    } yield ()
+  }
+
+  private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) =
+    saveAnnotationProto(annotationId, annotationWithTracings.version, annotationWithTracings.annotation)
+
+  private def determineTargetVersion(annotationId: String,
+                                     newestMaterializedAnnotation: AnnotationProto,
+                                     requestedVersionOpt: Option[Long]): Fox[Long] =
+    /*
+     * Determines the newest saved version from the updates column.
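+     * Illustrative (hypothetical numbers): with the newest update at v9 and
+     * earliestAccessibleVersion 4, a requested v7 yields 7, a requested v3 is
+     * clamped up to 4, and no requested version yields 9.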
+     * If there are no updates at all, assume the annotation is brand new (possibly created from NML,
+     * hence the emptyFallback newestMaterializedAnnotation.version)
+     */
+    for {
+      newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion(annotationId,
+                                                                           mayBeEmpty = Some(true),
+                                                                           emptyFallback =
+                                                                             Some(newestMaterializedAnnotation.version))
+      targetVersion = requestedVersionOpt match {
+        case None => newestUpdateVersion
+        case Some(requestedVersion) =>
+          math.max(newestMaterializedAnnotation.earliestAccessibleVersion,
+                   math.min(requestedVersion, newestUpdateVersion))
+      }
+    } yield targetVersion
+
+  def updateActionStatistics(tracingId: String): Fox[JsObject] =
+    for {
+      updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersions(tracingId)(
+        fromJsonBytes[List[UpdateAction]])
+      updateActions = updateActionGroups.flatten
+    } yield {
+      Json.obj(
+        "updateTracingActionCount" -> updateActions.count {
+          case _: UpdateTracingSkeletonAction => true
+          case _ => false
+        },
+        "createNodeActionCount" -> updateActions.count {
+          case _: CreateNodeSkeletonAction => true
+          case _ => false
+        },
+        "deleteNodeActionCount" -> updateActions.count {
+          case _: DeleteNodeSkeletonAction => true
+          case _ => false
+        }
+      )
+    }
+
+  def editableMappingLayer(annotationId: String, tracingId: String, tracing: VolumeTracing)(
+      implicit tc: TokenContext): EditableMappingLayer =
+    EditableMappingLayer(
+      tracingId,
+      tracing.boundingBox,
+      resolutions = tracing.mags.map(vec3IntFromProto).toList,
+      largestSegmentId = Some(0L),
+      elementClass = tracing.elementClass,
+      tc,
+      tracing = tracing,
+      annotationId = annotationId,
+      tracingId = tracingId,
+      annotationService = this,
+      editableMappingService = editableMappingService
+    )
+
+  def baseMappingName(annotationId: String, tracingId: String, tracing: VolumeTracing)(
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[Option[String]] =
+    if (tracing.getHasEditableMapping)
+      for {
+        editableMappingInfo <- findEditableMappingInfo(annotationId, tracingId)
+      } yield Some(editableMappingInfo.baseMappingName)
+    else Fox.successful(tracing.mappingName)
+
+  def findVolumeRaw(tracingId: String, version: Option[Long] = None): Fox[VersionedKeyValuePair[VolumeTracing]] =
+    tracingDataStore.volumes
+      .get[VolumeTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])
+
+  private def findSkeletonRaw(tracingId: String, version: Option[Long]): Fox[VersionedKeyValuePair[SkeletonTracing]] =
+    tracingDataStore.skeletons
+      .get[SkeletonTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[SkeletonTracing])
+
+  def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None)(
+      implicit tc: TokenContext,
+      ec: ExecutionContext): Fox[VolumeTracing] =
+    for {
+      isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId)
+      tracing <- if (isTemporaryTracing) temporaryTracingService.getVolume(tracingId)
+      else
+        for {
+          annotation <- getWithTracings(annotationId, version)
+          tracing <- annotation.getVolume(tracingId).toFox
+          migrated <- volumeTracingMigrationService.migrateTracing(tracing)
+        } yield migrated
+    } yield tracing
+
+  def findSkeleton(
+      annotationId: String,
+      tracingId: String,
+      version: Option[Long] = None
+  )(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] =
+    if (tracingId == TracingId.dummy)
+      Fox.successful(skeletonTracingService.dummyTracing)
+    else {
+      for {
+        isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId)
+        tracing <-
if (isTemporaryTracing) temporaryTracingService.getSkeleton(tracingId) + else + for { + annotation <- getWithTracings(annotationId, version) + tracing <- annotation.getSkeleton(tracingId).toFox + migrated <- skeletonTracingMigrationService.migrateTracing(tracing) + } yield migrated + } yield tracing + } + + def findMultipleVolumes(selectors: Seq[Option[TracingSelector]])( + implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = + Fox.combined { + selectors.map { + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- findVolume(annotationId, selector.tracingId, selector.version).map(Some(_)) + } yield tracing + case None => Fox.successful(None) + } + } + + def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]])( + implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = + Fox.combined { + selectors.map { + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- findSkeleton(annotationId, selector.tracingId, selector.version).map(Some(_)) + } yield tracing + case None => Fox.successful(None) + } + } + + def duplicate( + annotationId: String, + newAnnotationId: String, + version: Option[Long], + isFromTask: Boolean, + datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = + for { + v0Annotation <- get(annotationId, Some(0L)) + currentAnnotation <- get(annotationId, version) + + // Duplicate updates + tracingIdMap <- duplicateUpdates(annotationId, + newAnnotationId, + v0Annotation.annotationLayers.map(_.tracingId), + currentAnnotation.version) + + // Duplicate v0 + v0NewLayers <- Fox.serialCombined(v0Annotation.annotationLayers)(layer => + duplicateLayer(annotationId, layer, tracingIdMap, v0Annotation.version, isFromTask, datasetBoundingBox)) + v0DuplicatedAnnotation = v0Annotation.copy(annotationLayers = v0NewLayers, + earliestAccessibleVersion = v0Annotation.version) + + _ <- saveAnnotationProto(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) + + // Duplicate current + duplicatedAnnotation <- if (currentAnnotation.version > 0L) { + for { + newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)( + layer => + duplicateLayer(annotationId, + layer, + tracingIdMap, + currentAnnotation.version, + isFromTask, + datasetBoundingBox)) + currentDuplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, + earliestAccessibleVersion = currentAnnotation.version) + _ <- saveAnnotationProto(newAnnotationId, currentAnnotation.version, currentDuplicatedAnnotation) + } yield currentDuplicatedAnnotation + } else Fox.successful(v0DuplicatedAnnotation) + + } yield duplicatedAnnotation + + private def duplicateUpdates(annotationId: String, + newAnnotationId: String, + v0TracingIds: Seq[String], + newestVersion: Long)(implicit ec: ExecutionContext): Fox[Map[String, String]] = { + val tracingIdMapMutable = scala.collection.mutable.Map[String, String]() + v0TracingIds.foreach { v0TracingId => + tracingIdMapMutable.put(v0TracingId, TracingId.generate) + } + val updateBatchRanges = SequenceUtils.batchRangeInclusive(0L, newestVersion, batchSize = 100) + Fox + .serialCombined(updateBatchRanges.toList) { batchRange => + for { + updateLists: Seq[(Long, List[UpdateAction])] <- tracingDataStore.annotationUpdates + .getMultipleVersionsAsVersionValueTuple( + annotationId, + oldestVersion = 
Some(batchRange._1), + newestVersion = Some(batchRange._2))(fromJsonBytes[List[UpdateAction]]) + _ <- Fox.serialCombined(updateLists) { + case (version, updateList) => + for { + updateListAdapted <- Fox.serialCombined(updateList) { + case a: AddLayerAnnotationAction => + for { + actionTracingId <- a.tracingId ?~> "duplicating addLayer without tracingId" + _ = if (!tracingIdMapMutable.contains(actionTracingId)) { + a.tracingId.foreach(actionTracingId => + tracingIdMapMutable.put(actionTracingId, TracingId.generate)) + } + mappedTracingId <- tracingIdMapMutable.get(actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.copy(tracingId = Some(mappedTracingId)) + case a: LayerUpdateAction => + for { + mappedTracingId <- tracingIdMapMutable.get(a.actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.withActionTracingId(mappedTracingId) + } + _ <- tracingDataStore.annotationUpdates.put(newAnnotationId, version, Json.toJson(updateListAdapted)) + } yield () + } + } yield () + } + .map(_ => tracingIdMapMutable.toMap) + } + + private def duplicateLayer(annotationId: String, + layer: AnnotationLayerProto, + tracingIdMap: Map[String, String], + version: Long, + isFromTask: Boolean, + datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationLayerProto] = + for { + newTracingId <- tracingIdMap.get(layer.tracingId) ?~> "duplicate unknown layer" + _ <- layer.typ match { + case AnnotationLayerTypeProto.Volume => + duplicateVolumeTracing(annotationId, + layer.tracingId, + version, + newTracingId, + version, + isFromTask, + None, + datasetBoundingBox, + MagRestrictions.empty, + None, + None) + case AnnotationLayerTypeProto.Skeleton => + duplicateSkeletonTracing(annotationId, + layer.tracingId, + version, + newTracingId, + version, + isFromTask, + None, + None, + None) + case AnnotationLayerTypeProto.Unrecognized(num) => Fox.failure(f"unrecognized annotation layer type: $num") + } + } yield layer.copy(tracingId = newTracingId) + + def duplicateVolumeTracing( + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newTracingId: String, + newVersion: Long, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = + for { + sourceTracing <- findVolume(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) + newTracing <- volumeTracingService.adaptVolumeForDuplicate(sourceTracingId, + newTracingId, + sourceTracing, + isFromTask, + boundingBox, + datasetBoundingBox, + magRestrictions, + editPosition, + editRotation, + newVersion) + _ <- tracingDataStore.volumes.put(newTracingId, newVersion, newTracing) + _ <- Fox.runIf(!newTracing.getHasEditableMapping)( + volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing)) + _ <- Fox.runIf(newTracing.getHasEditableMapping)( + duplicateEditableMapping(sourceAnnotationId, sourceTracingId, newTracingId, sourceVersion, newVersion)) + } yield newTracingId + + private def duplicateEditableMapping(sourceAnnotationId: String, + sourceTracingId: String, + newTracingId: String, + sourceVersion: Long, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + for { + editableMappingInfo <- findEditableMappingInfo(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) + _ <- 
tracingDataStore.editableMappingsInfo.put(newTracingId, newVersion, toProtoBytes(editableMappingInfo)) + _ <- editableMappingService.duplicateSegmentToAgglomerate(sourceTracingId, + newTracingId, + sourceVersion, + newVersion) + _ <- editableMappingService.duplicateAgglomerateToGraph(sourceTracingId, newTracingId, sourceVersion, newVersion) + } yield () + + def duplicateSkeletonTracing( + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newTracingId: String, + newVersion: Long, + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = + for { + skeleton <- findSkeleton(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) + adaptedSkeleton = skeletonTracingService.adaptSkeletonForDuplicate(skeleton, + isFromTask, + editPosition, + editRotation, + boundingBox, + newVersion) + _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) + } yield newTracingId + + private def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( + implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = + for { + updatesByAnnotation <- Fox.serialCombined(annotationIds) { annotationId => + for { + updateGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple(annotationId)( + fromJsonBytes[List[UpdateAction]]) + updatesIroned: Seq[UpdateAction] = ironOutReverts(updateGroups) + editableMappingUpdates = updatesIroned.flatMap { + case a: EditableMappingUpdateAction => Some(a.withActionTracingId(newTracingId)) + case _ => None + } + } yield editableMappingUpdates + } + } yield updatesByAnnotation.flatten + + def mergeEditableMappings(annotationIds: List[String], + newAnnotationId: String, + newVolumeTracingId: String, + tracingsWithIds: List[(VolumeTracing, String)], + toTemporaryStore: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = + if (tracingsWithIds.nonEmpty && tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { + for { + before <- Instant.nowFox + _ <- bool2Fox(!toTemporaryStore) ?~> "Cannot merge editable mappings to temporary store (trying to merge compound annotations?)" + remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => + remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) + remoteFallbackLayer <- SequenceUtils.findUniqueElement(remoteFallbackLayers) ?~> "Cannot merge editable mappings based on different dataset layers" + editableMappingInfos <- Fox.serialCombined(tracingsWithIds) { tracingWithId => + tracingDataStore.editableMappingsInfo.get(tracingWithId._2)(fromProtoBytes[EditableMappingInfo]) + } + baseMappingName <- SequenceUtils.findUniqueElement(editableMappingInfos.map(_.value.baseMappingName)) ?~> "Cannot merge editable mappings based on different base mappings" + linearizedEditableMappingUpdates: List[UpdateAction] <- mergeEditableMappingUpdates(annotationIds, + newVolumeTracingId) + targetVersion = linearizedEditableMappingUpdates.length + _ <- Fox.runIf(!toTemporaryStore) { + var updateVersion = 1L + Fox.serialCombined(linearizedEditableMappingUpdates) { update: UpdateAction => + for { + _ <- tracingDataStore.annotationUpdates.put(newVolumeTracingId, updateVersion, Json.toJson(List(update))) + _ = updateVersion += 1 + } yield () + } + } + editableMappingInfo = editableMappingService.create(baseMappingName) + updater = 
editableMappingUpdaterFor(newAnnotationId, + newVolumeTracingId, + remoteFallbackLayer, + editableMappingInfo, + 0L, + targetVersion) + _ <- updater.applyUpdatesAndSave(editableMappingInfo, linearizedEditableMappingUpdates) + _ = Instant.logSince( + before, + s"Merging ${tracingsWithIds.length} editable mappings by applying ${linearizedEditableMappingUpdates.length} updates") + } yield targetVersion + } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { + Fox.empty + } else { + Fox.failure("Cannot merge annotations with and without editable mappings") + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala new file mode 100644 index 00000000000..fd2db0280e7 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -0,0 +1,221 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + MergeAgglomerateUpdateAction, + SplitAgglomerateUpdateAction +} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ +import com.scalableminds.webknossos.tracingstore.tracings.volume._ +import play.api.libs.json._ + +trait UpdateAction { + def actionTimestamp: Option[Long] + + def addTimestamp(timestamp: Long): UpdateAction + + def addInfo(info: Option[String]): UpdateAction + + def addAuthorId(authorId: Option[String]): UpdateAction + + def isViewOnlyChange: Boolean = false +} + +trait ApplyImmediatelyUpdateAction extends UpdateAction + +trait LayerUpdateAction extends UpdateAction { + def actionTracingId: String + def withActionTracingId(newTracingId: String): LayerUpdateAction +} + +object UpdateAction { + + implicit object updateActionFormat extends Format[UpdateAction] { + override def reads(json: JsValue): JsResult[UpdateAction] = { + val jsonValue = (json \ "value").as[JsObject] + if ((json \ "isCompacted").asOpt[Boolean].getOrElse(false)) { + deserialize[CompactVolumeUpdateAction](json) + } else { + (json \ "name").as[String] match { + // Skeleton + case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) + case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) + case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) + case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) + case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) + case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) + case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) + case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) + case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) + case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) + case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) + case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) + case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) + case 
"updateUserBoundingBoxesInSkeletonTracing" => + deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) + case "updateUserBoundingBoxVisibilityInSkeletonTracing" => + deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) + + // Volume + case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) + case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) + case "updateUserBoundingBoxesInVolumeTracing" => + deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) + case "updateUserBoundingBoxVisibilityInVolumeTracing" => + deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) + case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) + case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) + case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) + case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) + case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) + case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + case "addSegmentIndex" => deserialize[AddSegmentIndexVolumeAction](jsonValue) + + // Editable Mapping + case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) + case "splitAgglomerate" => deserialize[SplitAgglomerateUpdateAction](jsonValue) + + // Annotation + case "addLayerToAnnotation" => deserialize[AddLayerAnnotationAction](jsonValue) + case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationAction](jsonValue) + case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationAction](jsonValue) + case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationAction](jsonValue) + case "revertToVersion" => deserialize[RevertToVersionAnnotationAction](jsonValue) + case "resetToBase" => deserialize[ResetToBaseAnnotationAction](jsonValue) + case "updateTdCamera" => deserialize[UpdateTdCameraAnnotationAction](jsonValue) + + case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") + } + } + } + + private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( + implicit tjs: Reads[T]): JsResult[T] = + if (shouldTransformPositions) + json.transform(positionTransform).get.validate[T] + else + json.validate[T] + + private val positionTransform = + (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) + + override def writes(a: UpdateAction): JsValue = a match { + // Skeleton + case s: CreateTreeSkeletonAction => + Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) + case s: DeleteTreeSkeletonAction => + Json.obj("name" -> "deleteTree", "value" -> Json.toJson(s)(DeleteTreeSkeletonAction.jsonFormat)) + case s: UpdateTreeSkeletonAction => + Json.obj("name" -> "updateTree", "value" -> Json.toJson(s)(UpdateTreeSkeletonAction.jsonFormat)) + case s: MergeTreeSkeletonAction => + Json.obj("name" -> "mergeTree", "value" -> Json.toJson(s)(MergeTreeSkeletonAction.jsonFormat)) + case s: MoveTreeComponentSkeletonAction => + Json.obj("name" -> "moveTreeComponent", "value" -> Json.toJson(s)(MoveTreeComponentSkeletonAction.jsonFormat)) + case s: CreateNodeSkeletonAction => + Json.obj("name" -> "createNode", "value" -> 
Json.toJson(s)(CreateNodeSkeletonAction.jsonFormat)) + case s: DeleteNodeSkeletonAction => + Json.obj("name" -> "deleteNode", "value" -> Json.toJson(s)(DeleteNodeSkeletonAction.jsonFormat)) + case s: UpdateNodeSkeletonAction => + Json.obj("name" -> "updateNode", "value" -> Json.toJson(s)(UpdateNodeSkeletonAction.jsonFormat)) + case s: CreateEdgeSkeletonAction => + Json.obj("name" -> "createEdge", "value" -> Json.toJson(s)(CreateEdgeSkeletonAction.jsonFormat)) + case s: DeleteEdgeSkeletonAction => + Json.obj("name" -> "deleteEdge", "value" -> Json.toJson(s)(DeleteEdgeSkeletonAction.jsonFormat)) + case s: UpdateTreeGroupsSkeletonAction => + Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) + case s: UpdateTracingSkeletonAction => + Json.obj("name" -> "updateSkeletonTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) + case s: UpdateTreeVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeVisibility", + "value" -> Json.toJson(s)(UpdateTreeVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTreeGroupVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeGroupVisibility", + "value" -> Json.toJson(s)(UpdateTreeGroupVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTreeEdgesVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeEdgesVisibility", + "value" -> Json.toJson(s)(UpdateTreeEdgesVisibilitySkeletonAction.jsonFormat)) + case s: UpdateUserBoundingBoxesSkeletonAction => + Json.obj("name" -> "updateUserBoundingBoxesInSkeletonTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxesSkeletonAction.jsonFormat)) + case s: UpdateUserBoundingBoxVisibilitySkeletonAction => + Json.obj("name" -> "updateUserBoundingBoxVisibilityInSkeletonTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) + + // Volume + case s: UpdateBucketVolumeAction => + Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) + case s: UpdateTracingVolumeAction => + Json.obj("name" -> "updateVolumeTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) + case s: UpdateUserBoundingBoxesVolumeAction => + Json.obj("name" -> "updateUserBoundingBoxesInVolumeTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxesVolumeAction.jsonFormat)) + case s: UpdateUserBoundingBoxVisibilityVolumeAction => + Json.obj("name" -> "updateUserBoundingBoxVisibilityInVolumeTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilityVolumeAction.jsonFormat)) + case s: RemoveFallbackLayerVolumeAction => + Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayerVolumeAction.jsonFormat)) + case s: ImportVolumeDataVolumeAction => + Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeDataVolumeAction.jsonFormat)) + case s: CreateSegmentVolumeAction => + Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) + case s: UpdateSegmentVolumeAction => + Json.obj("name" -> "updateSegment", "value" -> Json.toJson(s)(UpdateSegmentVolumeAction.jsonFormat)) + case s: DeleteSegmentVolumeAction => + Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) + case s: UpdateSegmentGroupsVolumeAction => + Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) + case s: UpdateMappingNameVolumeAction => + Json.obj("name" -> "updateMappingName", 
"value" -> Json.toJson(s)(UpdateMappingNameVolumeAction.jsonFormat)) + case s: AddSegmentIndexVolumeAction => + Json.obj("name" -> "addSegmentIndex", "value" -> Json.toJson(s)(AddSegmentIndexVolumeAction.jsonFormat)) + case s: CompactVolumeUpdateAction => + Json.toJson(s) + + // Editable Mapping + case s: SplitAgglomerateUpdateAction => + Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) + case s: MergeAgglomerateUpdateAction => + Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) + + // Annotation + case s: AddLayerAnnotationAction => + Json.obj("name" -> "addLayerToAnnotation", "value" -> Json.toJson(s)(AddLayerAnnotationAction.jsonFormat)) + case s: DeleteLayerAnnotationAction => + Json.obj("name" -> "deleteLayerFromAnnotation", + "value" -> Json.toJson(s)(DeleteLayerAnnotationAction.jsonFormat)) + case s: UpdateLayerMetadataAnnotationAction => + Json.obj("name" -> "updateLayerMetadata", + "value" -> Json.toJson(s)(UpdateLayerMetadataAnnotationAction.jsonFormat)) + case s: UpdateMetadataAnnotationAction => + Json.obj("name" -> "updateMetadataOfAnnotation", + "value" -> Json.toJson(s)(UpdateMetadataAnnotationAction.jsonFormat)) + case s: RevertToVersionAnnotationAction => + Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAnnotationAction.jsonFormat)) + case s: ResetToBaseAnnotationAction => + Json.obj("name" -> "resetToBase", "value" -> Json.toJson(s)(ResetToBaseAnnotationAction.jsonFormat)) + case s: UpdateTdCameraAnnotationAction => + Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraAnnotationAction.jsonFormat)) + } + } +} + +case class UpdateActionGroup(version: Long, + timestamp: Long, + authorId: Option[String], + actions: List[UpdateAction], + stats: Option[JsObject], + info: Option[String], + transactionId: String, + transactionGroupCount: Int, + transactionGroupIndex: Int) { + + def significantChangesCount: Int = actions.count(!_.isViewOnlyChange) + def viewChangesCount: Int = actions.count(_.isViewOnlyChange) +} + +object UpdateActionGroup { + implicit val jsonFormat: OFormat[UpdateActionGroup] = Json.format[UpdateActionGroup] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala new file mode 100644 index 00000000000..831688fd1d9 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -0,0 +1,76 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import collections.SequenceUtils +import com.typesafe.scalalogging.LazyLogging + +trait UpdateGroupHandling extends LazyLogging { + + /* + * Regroup update action groups, isolating the update actions that need it. + * (Currently RevertToVersionAnnotationAction and AddLayerAnnotationAction) + * Assumes they are already the only update in their respective group. 
+   * Compare the unit test in UpdateGroupHandlingUnitTestSuite
+   */
+  def regroupByIsolationSensitiveActions(
+      updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = {
+    val splitGroupLists: List[List[(Long, List[UpdateAction])]] =
+      SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup =>
+        actionGroup._2.exists(updateAction => isIsolationSensitiveAction(updateAction)))
+    splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] =>
+      concatenateUpdateActionGroups(groupsToConcatenate)
+    }
+  }
+
+  private def concatenateUpdateActionGroups(
+      groups: List[(Long, List[UpdateAction])]): Option[(Long, List[UpdateAction])] = {
+    val updates = groups.flatMap(_._2)
+    val targetVersionOpt: Option[Long] = groups.map(_._1).lastOption
+    targetVersionOpt.map(targetVersion => (targetVersion, updates))
+  }
+
+  private def isIsolationSensitiveAction(a: UpdateAction): Boolean = a match {
+    case _: RevertToVersionAnnotationAction => true
+    case _: AddLayerAnnotationAction => true
+    case _ => false
+  }
+
+  /*
+   * Iron out reverts in a sequence of update groups.
+   * Scans for RevertToVersionActions and skips updates as specified by the reverts.
+   * Expects updateGroups as Version-Seq[UpdateAction] tuples, SORTED DESCENDING by version number.
+   * Returns a single Seq of UpdateAction, in to-apply order.
+   * Illustrative example (hypothetical versions): for groups v5..v1 where the v4 group reverts
+   * to v2, the v3 and v4 updates are skipped, and the result applies v1, v2, v5.
+   * Compare the unit test in UpdateGroupHandlingUnitTestSuite
+   */
+  def ironOutReverts(updateGroups: Seq[(Long, Seq[UpdateAction])]): Seq[UpdateAction] =
+    updateGroups.headOption match {
+      case None => Seq() // no update groups, return no updates
+      case Some(firstUpdateGroup) =>
+        val (ironedOutGroups: Seq[Seq[UpdateAction]], _) =
+          updateGroups.foldLeft[(Seq[Seq[UpdateAction]], Long)]((Seq(), firstUpdateGroup._1)) {
+            (collectedAndNextVersion: (Seq[Seq[UpdateAction]], Long), updateGroupWithVersion) =>
+              val collected = collectedAndNextVersion._1
+              val nextVersion = collectedAndNextVersion._2
+              if (updateGroupWithVersion._1 > nextVersion) {
+                // We have not yet reached nextVersion. Skip to the next element; do not collect, do not change nextVersion.
+                (collected, nextVersion)
+              } else {
+                val revertSourceVersionOpt = revertSourceVersionFromUpdates(updateGroupWithVersion._2)
+                revertSourceVersionOpt match {
+                  // This group is a revert action. Set nextVersion to revertSourceVersion, do not collect this group.
+                  case Some(revertSourceVersion) => (collected, revertSourceVersion)
+                  // This group is a normal action. 
Collect it, decrement nextVersion + // Note: we *prepend* the update group here, meaning the output will go from oldest to newest version + case None => (updateGroupWithVersion._2 +: collected, nextVersion - 1) + } + } + + } + ironedOutGroups.flatten + } + + private def revertSourceVersionFromUpdates(updates: Seq[UpdateAction]): Option[Long] = + updates.flatMap { + case u: RevertToVersionAnnotationAction => Some(u.sourceVersion) + case _ => None + }.headOption +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala new file mode 100644 index 00000000000..925e009ea7d --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -0,0 +1,146 @@ +package com.scalableminds.webknossos.tracingstore.controllers + +import com.google.inject.Inject +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph +import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong +import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingService, + MinCutParameters, + NeighborsParameters +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import net.liftweb.common.{Box, Empty, Failure, Full} +import play.api.libs.json.Json +import play.api.mvc.{Action, AnyContent, PlayBodyParsers} + +import scala.concurrent.ExecutionContext + +class EditableMappingController @Inject()( + volumeTracingService: VolumeTracingService, + annotationService: TSAnnotationService, + remoteWebknossosClient: TSRemoteWebknossosClient, + accessTokenService: TracingStoreAccessTokenService, + editableMappingService: EditableMappingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) + extends Controller { + + def editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) + infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) + } yield Ok(infoJson) + } + } + } + + def segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + agglomerateGraphBox: Box[AgglomerateGraph] <- 
editableMappingService + .getAgglomerateGraphForId(tracingId, tracing.version, agglomerateId) + .futureBox + segmentIds <- agglomerateGraphBox match { + case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) + case Empty => Fox.successful(List.empty) + case f: Failure => f.toFox + } + agglomerateIdIsPresent = agglomerateGraphBox.isDefined + } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) + } + } + } + + def agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]): Action[ListOfLong] = + Action.async(validateProto[ListOfLong]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + annotation <- annotationService.get(annotationId, version) + tracing <- annotationService.findVolume(annotationId, tracingId, version) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) + relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( + request.body.items.toSet, + editableMappingInfo, + annotation.version, + tracingId, + remoteFallbackLayer) + agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) + } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) + } + } + } + + def agglomerateGraphMinCut(tracingId: String): Action[MinCutParameters] = + Action.async(validateJson[MinCutParameters]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) + edges <- editableMappingService.agglomerateGraphMinCut(tracingId, + tracing.version, + editableMappingInfo, + request.body, + remoteFallbackLayer) + } yield Ok(Json.toJson(edges)) + } + } + } + + def agglomerateGraphNeighbors(tracingId: String): Action[NeighborsParameters] = + Action.async(validateJson[NeighborsParameters]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) + (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, + editableMappingInfo, + tracing.version, + request.body, + remoteFallbackLayer) + } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) + } + } + } + + def agglomerateSkeleton(tracingId: String, agglomerateId: Long): Action[AnyContent] = + Action.async { implicit request => + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(tracingId, + tracing.version, + editableMappingInfo, + remoteFallbackLayer, + agglomerateId) + } yield Ok(agglomerateSkeletonBytes) + } + } +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index f56b6d0d26e..6cc563ef9aa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -3,26 +3,29 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} +import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext -class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingService, - val remoteWebknossosClient: TSRemoteWebknossosClient, - val accessTokenService: TracingStoreAccessTokenService, - val slackNotificationService: TSSlackNotificationService)( +class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracingService, + remoteWebknossosClient: TSRemoteWebknossosClient, + annotationService: TSAnnotationService, + accessTokenService: TracingStoreAccessTokenService, + slackNotificationService: TSSlackNotificationService)( implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) - extends TracingController[SkeletonTracing, SkeletonTracings] { + extends Controller { implicit val tracingsCompanion: SkeletonTracings.type = SkeletonTracings @@ -35,72 +38,102 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer implicit def unpackMultiple(tracings: SkeletonTracings): 
List[Option[SkeletonTracing]] = tracings.tracings.toList.map(_.tracing) - def mergedFromContents(token: Option[String], persist: Boolean): Action[SkeletonTracings] = - Action.async(validateProto[SkeletonTracings]) { implicit request => + def save(): Action[SkeletonTracing] = Action.async(validateProto[SkeletonTracing]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + skeletonTracingService.saveSkeleton(tracing, None, 0).map { newId => + Ok(Json.toJson(newId)) + } + } + } + } + } + + def saveMultiple(): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[SkeletonTracing] => + tracingOpt match { + case Some(tracing) => skeletonTracingService.saveSkeleton(tracing, None, 0).map(Some(_)) + case _ => Fox.successful(None) + } + }) + savedIds.map(id => Ok(Json.toJson(id))) + } + } + } + } + + def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - val tracings: List[Option[SkeletonTracing]] = request.body + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { - mergedTracing <- Fox.box2Fox(tracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) - processedTracing = tracingService.remapTooLargeTreeIds(mergedTracing) - newId <- tracingService.save(processedTracing, None, processedTracing.version, toCache = !persist) - } yield Ok(Json.toJson(newId)) + tracing <- annotationService.findSkeleton(annotationId, tracingId, version) ?~> Messages("tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) } } } - def duplicate(token: Option[String], - tracingId: String, - version: Option[Long], - fromTask: Option[Boolean], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = - Action.async { implicit request => + def getMultiple: Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - tracing <- tracingService.find(tracingId, version, applyUpdates = true) ?~> Messages("tracing.notFound") - editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - newId <- tracingService.duplicate(tracing, - fromTask.getOrElse(false), - editPositionParsed, - editRotationParsed, - boundingBoxParsed) + tracings <- annotationService.findMultipleSkeletons(request.body) } yield { - Ok(Json.toJson(newId)) + Ok(tracings.toByteArray).as(protobufMimeType) } } } } - def updateActionLog(token: Option[String], - tracingId: String, - newestVersion: Option[Long], - oldestVersion: Option[Long]): Action[AnyContent] = Action.async { implicit request => - log() { - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) - } yield { - Ok(updateLog) + def mergedFromContents: Action[SkeletonTracings] = + Action.async(validateProto[SkeletonTracings]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracings: List[Option[SkeletonTracing]] = request.body + for { + mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten, newVersion = 0L)) + processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) + newId <- skeletonTracingService.saveSkeleton(processedTracing, None, processedTracing.version) + } yield Ok(Json.toJson(newId)) } } } - } - def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => + // Used in task creation. History is dropped. Caller is responsible to create and save a matching AnnotationProto object + def duplicate(tracingId: String, + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - statistics <- tracingService.updateActionStatistics(tracingId) - } yield { - Ok(statistics) + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + newestSourceVersion <- annotationService.currentMaterializableVersion(annotationId) + newTracingId <- annotationService.duplicateSkeletonTracing( + annotationId, + sourceTracingId = tracingId, + sourceVersion = newestSourceVersion, + newTracingId = TracingId.generate, + newVersion = 0, + editPosition = editPositionParsed, + editRotation = editRotationParsed, + boundingBox = boundingBoxParsed, + isFromTask = false + ) + } yield Ok(Json.toJson(newTracingId)) } } } - } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala new file mode 100644 index 00000000000..38cf7fe755d --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -0,0 +1,231 @@ +package com.scalableminds.webknossos.tracingstore.controllers + +import collections.SequenceUtils +import com.google.inject.Inject +import com.scalableminds.util.geometry.BoundingBox +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} +import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer +import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import 
com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationTransactionService, + ResetToBaseAnnotationAction, + TSAnnotationService, + UpdateActionGroup +} +import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings._ +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import net.liftweb.common.{Empty, Failure, Full} +import play.api.i18n.Messages +import play.api.libs.json.Json +import play.api.mvc.{Action, AnyContent, PlayBodyParsers} + +import scala.concurrent.ExecutionContext + +class TSAnnotationController @Inject()( + accessTokenService: TracingStoreAccessTokenService, + slackNotificationService: TSSlackNotificationService, + annotationService: TSAnnotationService, + annotationTransactionService: AnnotationTransactionService, + skeletonTracingService: SkeletonTracingService, + volumeTracingService: VolumeTracingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) + extends Controller + with KeyValueStoreImplicits { + + def save(annotationId: String, toTemporaryStore: Boolean = false): Action[AnnotationProto] = + Action.async(validateProto[AnnotationProto]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + _ <- annotationService.saveAnnotationProto(annotationId, 0L, request.body, toTemporaryStore) + } yield Ok + } + } + } + + def update(annotationId: String): Action[List[UpdateActionGroup]] = + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { + for { + _ <- annotationTransactionService.handleUpdateGroups(annotationId, request.body) + } yield Ok + } + } + } + } + + def updateActionLog(annotationId: String, + newestVersion: Option[Long] = None, + oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + newestMaterializableVersion <- annotationService.currentMaterializableVersion(annotationId) + updateLog <- annotationService.updateActionLog(annotationId, + newestVersion.getOrElse(newestMaterializableVersion), + oldestVersion.getOrElse(0)) + } yield Ok(updateLog) + } + } + } + + def newestVersion(annotationId: String): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + newestVersion <- annotationService.currentMaterializableVersion(annotationId) + } yield JsonOk(Json.obj("version" -> newestVersion)) + } + } + } + + def updateActionStatistics(tracingId: String): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + statistics <- annotationService.updateActionStatistics(tracingId) + } yield Ok(statistics) + } + } + } + + def get(annotationId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + annotationProto <- annotationService.get(annotationId, version) + } yield Ok(annotationProto.toByteArray).as(protobufMimeType) + } + } + } + } + + def duplicate(annotationId: String, + newAnnotationId: String, + version: Option[Long], + isFromTask: Boolean, + datasetBoundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + datasetBoundingBoxParsed <- Fox.runOptional(datasetBoundingBox)(BoundingBox.fromLiteral) + annotationProto <- annotationService.duplicate(annotationId, + newAnnotationId, + version, + isFromTask, + datasetBoundingBoxParsed) + } yield Ok(annotationProto.toByteArray).as(protobufMimeType) + } + } + } + } + + def resetToBase(annotationId: String): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + currentVersion <- annotationService.currentMaterializableVersion(annotationId) + _ <- annotationTransactionService.handleSingleUpdateAction(annotationId, + currentVersion, + ResetToBaseAnnotationAction()) + } yield Ok + } + } + } + } + + def mergedFromIds(toTemporaryStore: Boolean, newAnnotationId: String): Action[List[String]] = + Action.async(validateJson[List[String]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + annotations: Seq[AnnotationProto] <- annotationService.getMultiple(request.body) ?~> Messages( + "annotation.notFound") + skeletonLayers = annotations.flatMap(_.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Skeleton)) + volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Volume)) + newSkeletonId = TracingId.generate + newVolumeId = TracingId.generate + mergedSkeletonName = SequenceUtils + .findUniqueElement(skeletonLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultSkeletonLayerName) + mergedVolumeName = SequenceUtils + .findUniqueElement(volumeLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultVolumeLayerName) + volumeTracings <- annotationService + .findMultipleVolumes(volumeLayers.map { l => + Some(TracingSelector(l.tracingId)) + }) + .map(_.flatten) + mergeEditableMappingsResultBox <- annotationService + .mergeEditableMappings(request.body, + newAnnotationId, + newVolumeId, + volumeTracings.zip(volumeLayers.map(_.tracingId)), + toTemporaryStore) + .futureBox + (newMappingName: Option[String], newTargetVersion: Long) <- mergeEditableMappingsResultBox match { + case Full(targetVersion) => Fox.successful((Some(newVolumeId), targetVersion)) + case Empty => Fox.successful((None, 0L)) + case f: Failure => f.toFox + } + mergedVolumeStats <- volumeTracingService.mergeVolumeData(volumeLayers.map(_.tracingId), + volumeTracings, + newVolumeId, + newVersion = newTargetVersion, + toTemporaryStore) + mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( + volumeTracingService + .merge(volumeTracings, mergedVolumeStats, newMappingName, newVersion = newTargetVersion)) + _ <- Fox.runOptional(mergedVolumeOpt)( + volumeTracingService.saveVolume(_, Some(newVolumeId), version = newTargetVersion, toTemporaryStore)) + skeletonTracings <- annotationService + 
.findMultipleSkeletons(skeletonLayers.map { l => + Some(TracingSelector(l.tracingId)) + }) + .map(_.flatten) + mergedSkeletonOpt <- Fox.runIf(skeletonTracings.nonEmpty)( + skeletonTracingService.merge(skeletonTracings, newVersion = newTargetVersion).toFox) + mergedSkeletonLayerOpt = mergedSkeletonOpt.map( + _ => + AnnotationLayerProto(name = mergedSkeletonName, + tracingId = newSkeletonId, + typ = AnnotationLayerTypeProto.Skeleton)) + mergedVolumeLayerOpt = mergedVolumeOpt.map( + _ => + AnnotationLayerProto(name = mergedVolumeName, + tracingId = newVolumeId, + typ = AnnotationLayerTypeProto.Volume)) + mergedLayers = Seq(mergedSkeletonLayerOpt, mergedVolumeLayerOpt).flatten + firstAnnotation <- annotations.headOption.toFox + mergedAnnotation = firstAnnotation + .withAnnotationLayers(mergedLayers) + .withEarliestAccessibleVersion(newTargetVersion) + .withVersion(newTargetVersion) + _ <- Fox.runOptional(mergedSkeletonOpt)( + skeletonTracingService.saveSkeleton(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) + _ <- annotationService.saveAnnotationProto(newAnnotationId, + newTargetVersion, + mergedAnnotation, + toTemporaryStore) + } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) + } + } + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala deleted file mode 100644 index 4644d58e6a1..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ /dev/null @@ -1,295 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.controllers - -import com.scalableminds.util.time.Instant -import com.scalableminds.util.tools.Fox -import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} -import com.scalableminds.webknossos.datastore.controllers.Controller -import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.{ - TracingSelector, - TracingService, - UpdateAction, - UpdateActionGroup -} -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteWebknossosClient, - TracingStoreAccessTokenService, - TracingUpdatesReport -} -import net.liftweb.common.{Empty, Failure, Full} -import play.api.i18n.Messages -import play.api.libs.json.{Format, Json} -import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import scalapb.{GeneratedMessage, GeneratedMessageCompanion} - -import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ - -trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends Controller { - - def tracingService: TracingService[T] - - def remoteWebknossosClient: TSRemoteWebknossosClient - - def accessTokenService: TracingStoreAccessTokenService - - def slackNotificationService: TSSlackNotificationService - - implicit val tracingCompanion: GeneratedMessageCompanion[T] = tracingService.tracingCompanion - - implicit val tracingsCompanion: GeneratedMessageCompanion[Ts] - - implicit def unpackMultiple(tracings: Ts): List[Option[T]] - - implicit def packMultiple(tracings: List[T]): Ts - - implicit def packMultipleOpt(tracings: List[Option[T]]): Ts - - implicit val updateActionJsonFormat: Format[UpdateAction[T]] = tracingService.updateActionJsonFormat - - implicit val ec: ExecutionContext - - implicit val 
bodyParsers: PlayBodyParsers - - override def allowRemoteOrigin: Boolean = true - - def save(token: Option[String]): Action[T] = Action.async(validateProto[T]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - val tracing = request.body - tracingService.save(tracing, None, 0).map { newId => - Ok(Json.toJson(newId)) - } - } - } - } - } - - def saveMultiple(token: Option[String]): Action[Ts] = Action.async(validateProto[Ts]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[T] => - tracingOpt match { - case Some(tracing) => tracingService.save(tracing, None, 0).map(Some(_)) - case _ => Fox.successful(None) - } - }) - savedIds.map(id => Ok(Json.toJson(id))) - } - } - } - } - - def get(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId, version, applyUpdates = true) ?~> Messages("tracing.notFound") - } yield { - Ok(tracing.toByteArray).as(protobufMimeType) - } - } - } - } - - def getMultiple(token: Option[String]): Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - for { - tracings <- tracingService.findMultiple(request.body, applyUpdates = true) - } yield { - Ok(tracings.toByteArray).as(protobufMimeType) - } - } - } - } - - def newestVersion(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), token) { - for { - newestVersion <- tracingService.currentVersion(tracingId) ?~> "annotation.getNewestVersion.failed" - } yield { - JsonOk(Json.obj("version" -> newestVersion)) - } - } - } - } - - def update(token: Option[String], tracingId: String): Action[List[UpdateActionGroup[T]]] = - Action.async(validateJson[List[UpdateActionGroup[T]]]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { - val updateGroups = request.body - if (updateGroups.forall(_.transactionGroupCount == 1)) { - commitUpdates(tracingId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) - } else { - updateGroups - .foldLeft(tracingService.currentVersion(tracingId)) { (currentCommittedVersionFox, updateGroup) => - handleUpdateGroupForTransaction(tracingId, - currentCommittedVersionFox, - updateGroup, - urlOrHeaderToken(token, request)) - } - .map(_ => Ok) - } - } - } - } - } - - private val transactionGroupExpiry: FiniteDuration = 24 hours - - private def handleUpdateGroupForTransaction(tracingId: String, - previousVersionFox: Fox[Long], - updateGroup: UpdateActionGroup[T], - userToken: Option[String]): Fox[Long] = - for { - previousCommittedVersion: Long <- previousVersionFox - result <- if (previousCommittedVersion + 1 == updateGroup.version) { - if 
(updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { - // Received the last group of this transaction - commitWithPending(tracingId, updateGroup, userToken) - } else { - tracingService - .saveUncommitted(tracingId, - updateGroup.transactionId, - updateGroup.transactionGroupIndex, - updateGroup.version, - updateGroup, - transactionGroupExpiry) - .flatMap( - _ => - tracingService.saveToHandledGroupIdStore(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) - .map(_ => previousCommittedVersion) // no updates have been committed, do not yield version increase - } - } else { - failUnlessAlreadyHandled(updateGroup, tracingId, previousCommittedVersion) - } - } yield result - - // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction - // and commit them all. - private def commitWithPending(tracingId: String, - updateGroup: UpdateActionGroup[T], - userToken: Option[String]): Fox[Long] = - for { - previousActionGroupsToCommit <- tracingService.getAllUncommittedFor(tracingId, updateGroup.transactionId) - _ <- bool2Fox( - previousActionGroupsToCommit - .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." - concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) - commitResult <- commitUpdates(tracingId, List(concatenatedGroup), userToken) - _ <- tracingService.removeAllUncommittedFor(tracingId, updateGroup.transactionId) - } yield commitResult - - private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup[T]], - lastActionGroup: UpdateActionGroup[T]): UpdateActionGroup[T] = - if (previousActionGroups.isEmpty) lastActionGroup - else { - val allActionGroups = previousActionGroups :+ lastActionGroup - UpdateActionGroup[T]( - version = lastActionGroup.version, - timestamp = lastActionGroup.timestamp, - authorId = lastActionGroup.authorId, - actions = allActionGroups.flatMap(_.actions), - stats = lastActionGroup.stats, // the latest stats do count - info = lastActionGroup.info, // frontend sets this identically for all groups of transaction - transactionId = f"${lastActionGroup.transactionId}-concatenated", - transactionGroupCount = 1, - transactionGroupIndex = 0, - ) - } - - // Perform version check and commit the passed updates - private def commitUpdates(tracingId: String, - updateGroups: List[UpdateActionGroup[T]], - userToken: Option[String]): Fox[Long] = { - val currentCommittedVersion: Fox[Long] = tracingService.currentVersion(tracingId) - val report = TracingUpdatesReport( - tracingId, - timestamps = updateGroups.map(g => Instant(g.timestamp)), - statistics = updateGroups.flatMap(_.stats).lastOption, - significantChangesCount = updateGroups.map(_.significantChangesCount).sum, - viewChangesCount = updateGroups.map(_.viewChangesCount).sum, - userToken - ) - remoteWebknossosClient.reportTracingUpdates(report).flatMap { _ => - updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => - previousVersion.flatMap { prevVersion: Long => - if (prevVersion + 1 == updateGroup.version) { - tracingService - .handleUpdateGroup(tracingId, updateGroup, prevVersion, userToken) - .flatMap( - _ => - tracingService.saveToHandledGroupIdStore(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) - .map(_ => 
updateGroup.version) - } else failUnlessAlreadyHandled(updateGroup, tracingId, prevVersion) - } - } - } - } - - /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), - * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful - * despite the backend receiving it just fine. - */ - private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup[T], - tracingId: String, - previousVersion: Long): Fox[Long] = { - val errorMessage = s"Incorrect version. Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" - for { - _ <- Fox.assertTrue( - tracingService.handledGroupIdStoreContains(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT - } yield updateGroup.version - } - - def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - for { - tracingOpts <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages( - "tracing.notFound") - tracingsWithIds = tracingOpts.zip(request.body).flatMap { - case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) - case _ => None - } - newId = tracingService.generateTracingId - mergedVolumeStats <- tracingService.mergeVolumeData(request.body.flatten, - tracingsWithIds.map(_._1), - newId, - newVersion = 0L, - toCache = !persist, - token) - newEditableMappingIdBox <- tracingService - .mergeEditableMappings(tracingsWithIds, urlOrHeaderToken(token, request)) - .futureBox - newEditableMappingIdOpt <- newEditableMappingIdBox match { - case Full(newEditableMappingId) => Fox.successful(Some(newEditableMappingId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox - } - mergedTracing <- Fox.box2Fox( - tracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) - _ <- tracingService.save(mergedTracing, Some(newId), version = 0, toCache = !persist) - } yield Ok(Json.toJson(newId)) - } - } - } -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index cc9090c79ed..51d41f24383 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -1,13 +1,13 @@ package com.scalableminds.webknossos.tracingstore.controllers +import collections.SequenceUtils import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph -import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} +import com.scalableminds.webknossos.datastore.controllers.Controller import 
com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto import com.scalableminds.webknossos.datastore.helpers.{ GetSegmentIndexParameters, @@ -22,37 +22,19 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosDataRequest } import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.services.{ - EditableMappingSegmentListResult, - FullMeshRequest, - UserAccessRequest -} +import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ - EditableMappingService, - EditableMappingUpdateActionGroup, - MinCutParameters, - NeighborsParameters -} -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - MergedVolumeStats, - MagRestrictions, - TSFullMeshService, - UpdateMappingNameAction, - VolumeDataZipFormat, - VolumeSegmentIndexService, - VolumeSegmentStatisticsService, - VolumeTracingService -} -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService +import com.scalableminds.webknossos.tracingstore.tracings.volume._ +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, - TracingStoreConfig, - TracingUpdatesReport + TracingStoreConfig } -import net.liftweb.common.{Box, Empty, Failure, Full} +import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile import play.api.libs.json.Json @@ -63,18 +45,20 @@ import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext class VolumeTracingController @Inject()( - val tracingService: VolumeTracingService, + val volumeTracingService: VolumeTracingService, val config: TracingStoreConfig, val remoteDataStoreClient: TSRemoteDatastoreClient, val accessTokenService: TracingStoreAccessTokenService, + annotationService: TSAnnotationService, editableMappingService: EditableMappingService, val slackNotificationService: TSSlackNotificationService, val remoteWebknossosClient: TSRemoteWebknossosClient, + annotationTransactionService: AnnotationTransactionService, volumeSegmentStatisticsService: VolumeSegmentStatisticsService, volumeSegmentIndexService: VolumeSegmentIndexService, fullMeshService: TSFullMeshService, val rpc: RPC)(implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) - extends TracingController[VolumeTracing, VolumeTracings] + extends Controller with ProtoGeometryImplicits with KeyValueStoreImplicits { @@ -89,78 +73,140 @@ class VolumeTracingController @Inject()( implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) - def initialData(token: Option[String], - tracingId: String, - minMag: Option[Int], - maxMag: Option[Int]): Action[AnyContent] = + def save(newTracingId: Option[String]): Action[VolumeTracing] = Action.async(validateProto[VolumeTracing]) { + implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + volumeTracingService.saveVolume(tracing, newTracingId, 0).map { newId => + Ok(Json.toJson(newId)) + } + } + } + } + } + + def saveMultiple(): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[VolumeTracing] => + tracingOpt match { + case Some(tracing) => volumeTracingService.saveVolume(tracing, None, 0).map(Some(_)) + case _ => Fox.successful(None) + } + }) + savedIds.map(id => Ok(Json.toJson(id))) + } + } + } + } + + def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } + + def getMultiple: Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + tracings <- annotationService.findMultipleVolumes(request.body) + } yield { + Ok(tracings.toByteArray).as(protobufMimeType) + } + } + } + } + + def initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + // The annotation object may not yet exist here. Caller is responsible for saving that too.
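+ // A hedged sketch of that caller contract (the caller-side code is not part of this file; the
+ // step names below come from endpoints in this diff, while the ordering is an assumption):
+ //   1. save the tracing proto (e.g. via VolumeTracingController.save above), yielding a tracingId
+ //   2. upload the initial bucket data through this initialData endpoint
+ //   3. save a matching AnnotationProto referencing that tracingId, via TSAnnotationController.save
+ //      (annotationService.saveAnnotationProto)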
+ tracing <- annotationService.findVolumeRaw(tracingId) ?~> Messages("tracing.notFound") magRestrictions = MagRestrictions(minMag, maxMag) - mags <- tracingService.initializeWithData(tracingId, tracing, initialData, magRestrictions, token).toFox - _ <- tracingService.updateMagList(tracingId, tracing, mags) + mags <- volumeTracingService + .initializeWithData(tracingId, tracing.value, initialData, magRestrictions) + .toFox + _ <- volumeTracingService.updateMagList(tracingId, tracing.value, mags) } yield Ok(Json.toJson(tracingId)) } } } } - def mergedFromContents(token: Option[String], persist: Boolean): Action[VolumeTracings] = + def mergedFromContents: Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { _ <- Fox.successful(()) tracings = request.body shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings.flatten) - mt <- tracingService.merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty).toFox + mt <- volumeTracingService + .merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty, newVersion = 0L) + .toFox // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 mergedTracing = mt.copy(segments = List.empty) - newId <- tracingService.save(mergedTracing, None, mergedTracing.version, toCache = !persist) + newId <- volumeTracingService.saveVolume(mergedTracing, None, mergedTracing.version) } yield Ok(Json.toJson(newId)) } } } - def initialDataMultiple(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => + def initialDataMultiple(tracingId: String): Action[AnyContent] = + Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - mags <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData, token).toFox - _ <- tracingService.updateMagList(tracingId, tracing, mags) + // The annotation object may not yet exist here. Caller is responsible for saving that too.
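+ // findVolumeRaw reads the stored tracing proto without applying versioned update history
+ // (an assumption based on its use here: no annotation history can exist before the annotation object is saved).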
+ tracing <- annotationService.findVolumeRaw(tracingId) ?~> Messages("tracing.notFound") + mags <- volumeTracingService.initializeWithDataMultiple(tracingId, tracing.value, initialData).toFox + _ <- volumeTracingService.updateMagList(tracingId, tracing.value, mags) } yield Ok(Json.toJson(tracingId)) } } } - } + } - def allDataZip(token: Option[String], - tracingId: String, - volumeDataZipFormat: String, + def allDataZip(tracingId: String, + annotationId: Option[String], version: Option[Long], + volumeDataZipFormat: String, voxelSizeFactor: Option[String], voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + annotationId.map(UserAccessRequest.readAnnotation).getOrElse(UserAccessRequest.readTracing(tracingId))) { for { - tracing <- tracingService.find(tracingId, version) ?~> Messages("tracing.notFound") + _ <- bool2Fox(if (version.isDefined) annotationId.isDefined else true) ?~> "Volume data requests that specify a version must also pass an annotationId" + annotationIdFilled <- Fox.fillOption(annotationId)( + remoteWebknossosClient.getAnnotationIdForTracing(tracingId)) + tracing <- annotationService.findVolume(annotationIdFilled, tracingId, version) ?~> Messages( + "tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) voxelSize = voxelSizeFactorParsedOpt.map(voxelSizeParsed => VoxelSize.fromFactorAndUnitWithDefault(voxelSizeParsed, voxelSizeUnitParsedOpt)) - data <- tracingService.allDataZip( + data <- volumeTracingService.allDataZip( tracingId, tracing, volumeDataZipFormatParsed, @@ -171,15 +217,20 @@ class VolumeTracingController @Inject()( } } - def data(token: Option[String], tracingId: String): Action[List[WebknossosDataRequest]] = + def data(tracingId: String, annotationId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - (data, indices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) - else tracingService.data(tracingId, tracing, request.body) + requestedVersion <- SequenceUtils + .findUniqueElement(request.body.map(_.version)) + .toFox ?~> "All data requests must request the same volume version" + tracing <- annotationService.findVolume(annotationId, tracingId, requestedVersion) ?~> Messages( + "tracing.notFound") + (data, indices) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, request.body) + } else volumeTracingService.data(tracingId, tracing, request.body) } yield Ok(data).withHeaders(getMissingBucketsHeaders(indices): _*) } } @@ -191,120 +242,38 @@ class VolumeTracingController @Inject()( private def formatMissingBucketList(indices: List[Int]): String = "[" +
indices.mkString(", ") + "]" - def duplicate(token: Option[String], - tracingId: String, - fromTask: Option[Boolean], - minMag: Option[Int], - maxMag: Option[Int], - downsample: Option[Boolean], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - val userToken = urlOrHeaderToken(token, request) - accessTokenService.validateAccess(UserAccessRequest.webknossos, userToken) { - for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - _ = logger.info(s"Duplicating volume tracing $tracingId...") - datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) - magRestrictions = MagRestrictions(minMag, maxMag) - editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( - tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newEditableMappingId <- Fox.runIf(tracing.getHasEditableMapping)( - editableMappingService.duplicate(tracing.mappingName, version = None, remoteFallbackLayerOpt, userToken)) - (newId, newTracing) <- tracingService.duplicate( - tracingId, - tracing, - fromTask.getOrElse(false), - datasetBoundingBox, - magRestrictions, - editPositionParsed, - editRotationParsed, - boundingBoxParsed, - newEditableMappingId, - userToken - ) - _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(newId, tracingId, newTracing, userToken)) - } yield Ok(Json.toJson(newId)) - } - } - } - } - - def importVolumeData(token: Option[String], tracingId: String): Action[MultipartFormData[TemporaryFile]] = + def importVolumeData(tracingId: String): Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox - largestSegmentId <- tracingService.importVolumeData(tracingId, - tracing, - zipFile, - currentVersion, - urlOrHeaderToken(token, request)) + largestSegmentId <- volumeTracingService.importVolumeData(tracingId, tracing, zipFile, currentVersion) + _ <- annotationTransactionService.handleSingleUpdateAction( + annotationId, + tracing.version, + ImportVolumeDataVolumeAction(tracingId, Some(largestSegmentId))) } yield Ok(Json.toJson(largestSegmentId)) } } } - def addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" - currentVersion <- tracingService.currentVersion(tracingId) - before = 
Instant.now - canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing, token) - processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)( - tracingService.addSegmentIndex(tracingId, - tracing, - currentVersion, - urlOrHeaderToken(token, request), - dryRun)) ?~> "addSegmentIndex.failed" - currentVersionNew <- tracingService.currentVersion(tracingId) - _ <- Fox.runIf(!dryRun)(bool2Fox( - processedBucketCountOpt.isEmpty || currentVersionNew == currentVersion + 1L) ?~> "Version increment failed. Looks like someone edited the annotation layer in the meantime.") - duration = Instant.since(before) - _ = processedBucketCountOpt.foreach { processedBucketCount => - logger.info( - s"Added segment index (dryRun=$dryRun) for tracing $tracingId. Took $duration for $processedBucketCount buckets") - } - } yield Ok - } - } - } - - def updateActionLog(token: Option[String], - tracingId: String, - newestVersion: Option[Long] = None, - oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) - } yield Ok(updateLog) - } - } - } - - def requestAdHocMesh(token: Option[String], tracingId: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
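// An illustrative client-side decoding of that format (hypothetical sketch, not part of this codebase;
// `floats` stands for the little-endian float array read from the response buffer built below):
//   val points = floats.grouped(3).map(p => (p(0), p(1), p(2)))  // three floats -> one 3D point
//   val triangles = points.grouped(3)                            // three points (nine floats) -> one triangle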
- tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - (vertices, neighbors) <- if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) - else tracingService.createAdHocMesh(tracingId, request.body, urlOrHeaderToken(token, request)) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + (vertices: Array[Float], neighbors: List[Int]) <- if (tracing.getHasEditableMapping) { + val editableMappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.createAdHocMesh(editableMappingLayer, request.body) + } else volumeTracingService.createAdHocMesh(tracingId, tracing, request.body) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) @@ -314,11 +283,12 @@ class VolumeTracingController @Inject()( } } - def loadFullMeshStl(token: Option[String], tracingId: String): Action[FullMeshRequest] = + def loadFullMeshStl(tracingId: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], tracingId, request.body) ?~> "mesh.file.loadChunk.failed" + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + data: Array[Byte] <- fullMeshService.loadFor(annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } } @@ -329,265 +299,64 @@ class VolumeTracingController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def findData(tracingId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - positionOpt <- tracingService.findData(tracingId) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + positionOpt <- volumeTracingService.findData(tracingId, tracing) } yield { Ok(Json.obj("position" -> positionOpt, "mag" -> positionOpt.map(_ => Vec3Int.ones))) } } } - def agglomerateSkeleton(token: Option[String], tracingId: String, agglomerateId: Long): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" - mappingName <- tracing.mappingName ?~> "annotation.agglomerateSkeleton.noMappingSet" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback( - mappingName, - remoteFallbackLayer, - 
agglomerateId, - urlOrHeaderToken(token, request)) - } yield Ok(agglomerateSkeletonBytes) - } - } - - def makeMappingEditable(token: Option[String], tracingId: String): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" - _ <- assertMappingIsNotLocked(tracing) - _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" - (editableMappingId, editableMappingInfo) <- editableMappingService.create( - baseMappingName = tracingMappingName) - volumeUpdate = UpdateMappingNameAction(Some(editableMappingId), - isEditable = Some(true), - isLocked = Some(true), - actionTimestamp = Some(System.currentTimeMillis())) - _ <- tracingService.handleUpdateGroup( - tracingId, - UpdateActionGroup[VolumeTracing](tracing.version + 1, - System.currentTimeMillis(), - None, - List(volumeUpdate), - None, - None, - "dummyTransactionId", - 1, - 0), - tracing.version, - urlOrHeaderToken(token, request) - ) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingId = editableMappingId, - editableMappingInfo = editableMappingInfo, - version = Some(0L)) - } yield Ok(infoJson) - } - } - } - - private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = - bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" - - def agglomerateGraphMinCut(token: Option[String], tracingId: String): Action[MinCutParameters] = - Action.async(validateJson[MinCutParameters]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - edges <- editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer, token) - } yield Ok(Json.toJson(edges)) - } - } - } - - def agglomerateGraphNeighbors(token: Option[String], tracingId: String): Action[NeighborsParameters] = - Action.async(validateJson[NeighborsParameters]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, - remoteFallbackLayer, - token) - } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) - } - } - } - - def updateEditableMapping(token: Option[String], tracingId: String): Action[List[EditableMappingUpdateActionGroup]] = - Action.async(validateJson[List[EditableMappingUpdateActionGroup]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - currentVersion <- 
editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) - _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" - updateGroup <- request.body.headOption.toFox - _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" - report = TracingUpdatesReport( - tracingId, - timestamps = List(Instant(updateGroup.timestamp)), - statistics = None, - significantChangesCount = updateGroup.actions.length, - viewChangesCount = 0, - urlOrHeaderToken(token, request) - ) - _ <- remoteWebknossosClient.reportTracingUpdates(report) - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - _ <- editableMappingService.update(mappingName, - updateGroup, - updateGroup.version, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - } yield Ok - } - } - - def editableMappingUpdateActionLog(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - updateLog <- editableMappingService.updateActionLog(mappingName) - } yield Ok(updateLog) - } - } - } - - def editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- editableMappingService.getInfo(mappingName, - version, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingId = mappingName, - editableMappingInfo = editableMappingInfo, - version = version) - } yield Ok(infoJson) - } - } - } - - def editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String): Action[ListOfLong] = - Action.async(validateProto[ListOfLong]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - editableMappingId <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( - editableMappingId, - requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer, - userToken = urlOrHeaderToken(token, request)) - relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( - request.body.items.toSet, - editableMappingInfo, - editableMappingVersion, - editableMappingId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) - } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) - } - } - } - - def editableMappingSegmentIdsForAgglomerate(token: Option[String], - tracingId: String, - agglomerateId: Long): Action[AnyContent] = Action.async { - implicit request => - 
log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(mappingName, - agglomerateId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - .futureBox - segmentIds <- agglomerateGraphBox match { - case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) - case Empty => Fox.successful(List.empty) - case f: Failure => f.toFox - } - agglomerateIdIsPresent = agglomerateGraphBox.isDefined - } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) - } - } - } - - def getSegmentVolume(token: Option[String], tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) - mappingName <- tracingService.baseMappingName(tracing) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => - volumeSegmentStatisticsService.getSegmentVolume(tracingId, + volumeSegmentStatisticsService.getSegmentVolume(annotationId, + tracingId, segmentId, request.body.mag, mappingName, - request.body.additionalCoordinates, - urlOrHeaderToken(token, request)) + request.body.additionalCoordinates) } } yield Ok(Json.toJson(segmentVolumes)) } } - def getSegmentBoundingBox(token: Option[String], tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) - mappingName <- tracingService.baseMappingName(tracing) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => - volumeSegmentStatisticsService.getSegmentBoundingBox(tracingId, + volumeSegmentStatisticsService.getSegmentBoundingBox(annotationId, + tracingId, segmentId, request.body.mag, mappingName, - request.body.additionalCoordinates, - urlOrHeaderToken(token, request)) + request.body.additionalCoordinates) } } yield Ok(Json.toJson(segmentBoundingBoxes)) } } - def getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = + def getSegmentIndex(tracingId: String, segmentId: 
Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - fallbackLayer <- tracingService.getFallbackLayer(tracingId) - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - mappingName <- tracingService.baseMappingName(tracing) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) _ <- bool2Fox(DataLayer.bucketSize <= request.body.cubeSize) ?~> "cubeSize must be at least one bucket (32³)" bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -598,8 +367,7 @@ class VolumeTracingController @Inject()( additionalCoordinates = request.body.additionalCoordinates, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), mappingName = mappingName, - editableMappingTracingId = tracingService.editableMappingTracingId(tracing, tracingId), - userToken = urlOrHeaderToken(token, request) + editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId) ) bucketPositionsForCubeSize = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -611,4 +379,40 @@ } } + // Used in task creation. History is dropped. Caller is responsible for creating and saving a matching AnnotationProto object. + def duplicate(tracingId: String, + minMag: Option[Int], + maxMag: Option[Int], + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + magRestrictions = MagRestrictions(minMag, maxMag) + newestSourceVersion <- annotationService.currentMaterializableVersion(annotationId) + newTracingId <- annotationService.duplicateVolumeTracing( + annotationId, + sourceTracingId = tracingId, + sourceVersion = newestSourceVersion, + newTracingId = TracingId.generate, + newVersion = 0, + editPosition = editPositionParsed, + editRotation = editRotationParsed, + boundingBox = boundingBoxParsed, + datasetBoundingBox = None, + isFromTask = false, + magRestrictions = magRestrictions + ) + } yield Ok(Json.toJson(newTracingId)) + } + } + } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 64094d38910..00359960781 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.mvc.ExtendedController import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -30,6 +31,7 @@ import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataFormat, DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.webknossos.tracingstore.{ @@ -47,6 +49,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracingService: VolumeTracingService, accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, remoteDataStoreClient: TSRemoteDatastoreClient, remoteWebknossosClient: TSRemoteWebknossosClient)(implicit ec: ExecutionContext) extends ExtendedController @@ -56,11 +59,12 @@ class VolumeTracingZarrStreamingController @Inject()( override def defaultErrorCode: Int = NOT_FOUND - def volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContent(tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -75,11 +79,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) additionalFiles = if (zarrVersion == 2) 
List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -88,15 +93,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContent(token: Option[String], - tracingId: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -111,15 +113,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContentJson(token: Option[String], - tracingId: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -128,15 +127,14 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zArray(token: Option[String], tracingId: String, mag: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def zArray(tracingId: String, mag: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> 
NOT_FOUND cubeLength = DataLayer.bucketLength @@ -162,17 +160,17 @@ class VolumeTracingZarrStreamingController @Inject()( order = ArrayOrder.F) } yield Ok(Json.toJson(zarrHeader)) } - } + } - def zarrJsonForMag(token: Option[String], tracingId: String, mag: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def zarrJsonForMag(tracingId: String, mag: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND additionalAxes = AdditionalAxis.fromProtos(tracing.additionalAxes) @@ -209,10 +207,10 @@ class VolumeTracingZarrStreamingController @Inject()( ) } yield Ok(Json.toJson(zarrHeader)) } - } + } - def zGroup(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def zGroup(tracingId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) } } @@ -223,13 +221,12 @@ class VolumeTracingZarrStreamingController @Inject()( * Used by zarr-streaming. 
*/ def zAttrs( - token: Option[String], tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(tracingId, @@ -240,13 +237,12 @@ class VolumeTracingZarrStreamingController @Inject()( } def zarrJson( - token: Option[String], tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND sortedExistingMags = tracing.mags.map(vec3IntFromProto).toList.sortBy(_.maxDim) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(tracingId, @@ -258,15 +254,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zarrSource(token: Option[String], - tracingId: String, - tracingName: Option[String], - zarrVersion: Int): Action[AnyContent] = + def zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND zarrLayer = ZarrSegmentationLayer( name = tracingName.getOrElse(tracingId), largestSegmentId = tracing.largestSegmentId, @@ -281,12 +274,13 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String): Action[AnyContent] = + def rawZarrCube(tracingId: String, mag: String, coordinates: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags 
= tracing.mags.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -306,9 +300,10 @@ class VolumeTracingZarrStreamingController @Inject()( version = None, additionalCoordinates = additionalCoordinates ) - (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, List(wkRequest), urlOrHeaderToken(token, request)) - else tracingService.data(tracingId, tracing, List(wkRequest)) + (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, List(wkRequest)) + } else tracingService.data(tracingId, tracing, List(wkRequest)) dataWithFallback <- getFallbackLayerDataIfEmpty(tracing, tracingId, data, @@ -316,22 +311,21 @@ class VolumeTracingZarrStreamingController @Inject()( magParsed, Vec3Int(x, y, z), cubeSize, - additionalCoordinates, - urlOrHeaderToken(token, request)) ~> NOT_FOUND + additionalCoordinates) ~> NOT_FOUND } yield Ok(dataWithFallback) } } } - private def getFallbackLayerDataIfEmpty(tracing: VolumeTracing, - tracingId: String, - data: Array[Byte], - missingBucketIndices: List[Int], - mag: Vec3Int, - position: Vec3Int, - cubeSize: Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - urlToken: Option[String]): Fox[Array[Byte]] = + private def getFallbackLayerDataIfEmpty( + tracing: VolumeTracing, + tracingId: String, + data: Array[Byte], + missingBucketIndices: List[Int], + mag: Vec3Int, + position: Vec3Int, + cubeSize: Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = if (missingBucketIndices.nonEmpty) { for { remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) ?~> "No data at coordinates, no fallback layer defined" @@ -345,8 +339,7 @@ class VolumeTracingZarrStreamingController @Inject()( additionalCoordinates = additionalCoordinates ) (fallbackData, fallbackMissingBucketIndices) <- remoteDataStoreClient.getData(remoteFallbackLayer, - List(request), - urlToken) + List(request)) _ <- bool2Fox(fallbackMissingBucketIndices.isEmpty) ?~> "No data at coordinations in fallback layer" } yield fallbackData } else Fox.successful(data) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index 9a50f4c61a3..00916f63aae 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox @@ -36,10 +37,9 @@ trait FallbackDataHelper { datasetId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) } yield RemoteFallbackLayer(datasetId.organizationId, datasetId.directoryName, layerName, tracing.elementClass) - def getFallbackDataFromDatastore( - remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebknossosDataRequest], - userToken: Option[String])(implicit ec: 
ExecutionContext): Fox[(Array[Byte], List[Int])] = - fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, userToken), - k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests, k.userToken)) + def getFallbackDataFromDatastore(remoteFallbackLayer: RemoteFallbackLayer, dataRequests: List[WebknossosDataRequest])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[(Array[Byte], List[Int])] = + fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, tc.userTokenOpt), + k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala new file mode 100644 index 00000000000..68f866653ae --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala @@ -0,0 +1,94 @@ +package com.scalableminds.webknossos.tracingstore.tracings + +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.bool2Fox +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import scalapb.GeneratedMessageCompanion + +import javax.inject.Inject +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt + +// This service holds temporary stores, meant for temporary tracings only (e.g. compound projects) +// They cannot be used for download or updating/versioning +class TemporaryTracingService @Inject()( + skeletonStore: TemporaryTracingStore[SkeletonTracing], + volumeStore: TemporaryTracingStore[VolumeTracing], + volumeDataStore: TemporaryTracingStore[Array[Byte]], + annotationStore: TemporaryTracingStore[AnnotationProto], + temporaryTracingIdStore: TracingStoreRedisStore)(implicit ec: ExecutionContext) { + + implicit def skeletonTracingCompanion: GeneratedMessageCompanion[SkeletonTracing] = SkeletonTracing + implicit def volumeTracingCompanion: GeneratedMessageCompanion[VolumeTracing] = VolumeTracing + implicit def annotationProtoCompanion: GeneratedMessageCompanion[AnnotationProto] = AnnotationProto + + // this should be longer than maxCacheTime in webknossos/AnnotationStore + // so that the references saved there remain valid throughout their life + private val temporaryStoreTimeout = 70 minutes + + // the information that a tracing is/was temporary needs to be stored longer + // to provide useful error messages to the user if the temporary tracing is no longer present + private val temporaryIdStoreTimeout = 10 days + + private def temporaryTracingIdKey(tracingId: String) = + s"temporaryTracingId___$tracingId" + + private def temporaryAnnotationIdKey(annotationId: String) = + s"temporaryAnnotationId___$annotationId" + + def getAnnotation(annotationId: String): Fox[AnnotationProto] = annotationStore.get(annotationId) + + def getVolume(tracingId: String): Fox[VolumeTracing] = volumeStore.get(tracingId) + + def getSkeleton(tracingId: String): Fox[SkeletonTracing] = skeletonStore.get(tracingId) + + def getVolumeBucket(bucketKey: String): Fox[Array[Byte]] = + volumeDataStore.get(bucketKey) + + def getAllVolumeBucketsWithPrefix(bucketPrefix: String):
collection.Map[String, Array[Byte]] = + volumeDataStore.getAllConditionalWithKey(key => key.startsWith(bucketPrefix)) + + def saveSkeleton(tracingId: String, skeletonTracing: SkeletonTracing): Fox[Unit] = { + skeletonStore.insert(tracingId, skeletonTracing, Some(temporaryStoreTimeout)) + registerTracingId(tracingId) + Fox.successful(()) + } + + def saveVolume(tracingId: String, volumeTracing: VolumeTracing): Fox[Unit] = { + volumeStore.insert(tracingId, volumeTracing, Some(temporaryStoreTimeout)) + registerTracingId(tracingId) + Fox.successful(()) + } + + def saveVolumeBucket(bucketKey: String, bucketData: Array[Byte]): Fox[Unit] = { + volumeDataStore.insert(bucketKey, bucketData, Some(temporaryStoreTimeout)) + Fox.successful(()) + } + + def saveAnnotationProto(annotationId: String, annotationProto: AnnotationProto): Fox[Unit] = { + annotationStore.insert(annotationId, annotationProto, Some(temporaryStoreTimeout)) + registerAnnotationId(annotationId) + Fox.successful(()) + } + + def isTemporaryAnnotation(annotationId: String): Fox[Boolean] = + temporaryTracingIdStore.contains(temporaryAnnotationIdKey(annotationId)) + + def isTemporaryTracing(tracingId: String): Fox[Boolean] = + temporaryTracingIdStore.contains(temporaryTracingIdKey(tracingId)) + + def assertTracingStillPresent(tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- bool2Fox(volumeStore.contains(tracingId)) ?~> "Temporary Volume Tracing expired" + } yield () + + private def registerTracingId(tracingId: String) = + temporaryTracingIdStore.insertKey(temporaryTracingIdKey(tracingId), Some(temporaryIdStoreTimeout)) + + private def registerAnnotationId(annotationId: String) = + temporaryTracingIdStore.insertKey(temporaryAnnotationIdKey(annotationId), Some(temporaryIdStoreTimeout)) + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index 1e96f6c03bb..76be451e007 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -22,16 +22,12 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val skeletons = new FossilDBClient("skeletons", config, slackNotificationService) - lazy val skeletonUpdates = new FossilDBClient("skeletonUpdates", config, slackNotificationService) - lazy val volumes = new FossilDBClient("volumes", config, slackNotificationService) lazy val volumeData = new FossilDBClient("volumeData", config, slackNotificationService) lazy val volumeSegmentIndex = new FossilDBClient("volumeSegmentIndex", config, slackNotificationService) - lazy val volumeUpdates = new FossilDBClient("volumeUpdates", config, slackNotificationService) - lazy val editableMappingsInfo = new FossilDBClient("editableMappingsInfo", config, slackNotificationService) lazy val editableMappingsAgglomerateToGraph = @@ -40,19 +36,20 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val editableMappingsSegmentToAgglomerate = new FossilDBClient("editableMappingsSegmentToAgglomerate", config, slackNotificationService) - lazy val editableMappingUpdates = new FossilDBClient("editableMappingUpdates", config, slackNotificationService) + lazy val annotations = new FossilDBClient("annotations", config, slackNotificationService) + + lazy val annotationUpdates = new 
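// Editor's sketch (illustrative, not part of this diff): the per-layer update columns are gone;
// judging by their names, the two new annotation-level column families above hold the materialized
// annotation and its update log. Based solely on the versioned put/get calls visible elsewhere in
// this diff, writing and reading an AnnotationProto would look roughly like this (`annotationId`,
// `version` and `annotationProto` are placeholders):
tracingDataStore.annotations.put(annotationId, version, annotationProto)
tracingDataStore.annotations.get(annotationId, Some(version))(fromProtoBytes[AnnotationProto])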
FossilDBClient("annotationUpdates", config, slackNotificationService) private def shutdown(): Unit = { healthClient.shutdown() skeletons.shutdown() - skeletonUpdates.shutdown() + annotationUpdates.shutdown() + annotations.shutdown() volumes.shutdown() volumeData.shutdown() - volumeUpdates.shutdown() editableMappingsInfo.shutdown() editableMappingsAgglomerateToGraph.shutdown() editableMappingsSegmentToAgglomerate.shutdown() - editableMappingUpdates.shutdown() volumeSegmentIndex.shutdown() () } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala new file mode 100644 index 00000000000..9c6a1af49eb --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala @@ -0,0 +1,11 @@ +package com.scalableminds.webknossos.tracingstore.tracings + +import java.util.UUID + +object TracingId { + + def generate: String = UUID.randomUUID.toString + + lazy val dummy: String = "dummyTracingId" + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala index 795e5e1ec9f..ae10d7efb7b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala @@ -22,52 +22,48 @@ trait TracingMigrationService[T <: GeneratedMessage] extends FoxImplicits { implicit protected def ec: ExecutionContext // Each migration transforms a tracing and additionally returns whether the tracing was modified - protected def migrations: List[T => Fox[(T, Boolean)]] + protected def migrations: List[T => Fox[T]] - def migrateTracing(tracing: Fox[T]): Fox[(T, Boolean)] = { - def migrateIter(tracingAndChanged: Fox[(T, Boolean)], migrations: List[T => Fox[(T, Boolean)]]): Fox[(T, Boolean)] = + def migrateTracing(tracing: T): Fox[T] = { + def migrateIter(tracingFox: Fox[T], migrations: List[T => Fox[T]]): Fox[T] = migrations match { - case List() => tracingAndChanged + case List() => tracingFox case head :: tail => - tracingAndChanged.futureBox.flatMap { - case Full((tracing, hasChangedPrev)) => - migrateIter(head(tracing).map(t => (t._1, hasChangedPrev || t._2)), tail) + tracingFox.futureBox.flatMap { + case Full(tracing) => + migrateIter(head(tracing), tail) case x => box2Fox(x) } } - migrateIter(tracing.map((_, false)), migrations) + migrateIter(Fox.successful(tracing), migrations) } } class SkeletonTracingMigrationService @Inject()()(implicit val ec: ExecutionContext) extends TracingMigrationService[SkeletonTracing] with ColorGenerator { - override protected val migrations: List[SkeletonTracing => Fox[(SkeletonTracing, Boolean)]] = List( - removeSingleUserBoundingBox) + override protected val migrations: List[SkeletonTracing => Fox[SkeletonTracing]] = List(removeSingleUserBoundingBox) - private def removeSingleUserBoundingBox(tracing: SkeletonTracing): Fox[(SkeletonTracing, Boolean)] = { + private def removeSingleUserBoundingBox(tracing: SkeletonTracing): Fox[SkeletonTracing] = { val newUserBoundingBox: Option[ProtoBox] = tracing.userBoundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 ProtoBox(newId, color = 
Some(getRandomColor), boundingBox = bb) } - Fox.successful( - (tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox), tracing.userBoundingBox.isDefined)) + Fox.successful(tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox)) } } class VolumeTracingMigrationService @Inject()()(implicit val ec: ExecutionContext) extends TracingMigrationService[VolumeTracing] with ColorGenerator { - override protected val migrations: List[VolumeTracing => Fox[(VolumeTracing, Boolean)]] = List( - removeSingleUserBoundingBox) + override protected val migrations: List[VolumeTracing => Fox[VolumeTracing]] = List(removeSingleUserBoundingBox) - private def removeSingleUserBoundingBox(tracing: VolumeTracing): Fox[(VolumeTracing, Boolean)] = { + private def removeSingleUserBoundingBox(tracing: VolumeTracing): Fox[VolumeTracing] = { val newUserBoundingBox: Option[ProtoBox] = tracing.userBoundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 ProtoBox(newId, color = Some(getRandomColor), boundingBox = bb) } - Fox.successful( - (tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox), tracing.userBoundingBox.isDefined)) + Fox.successful(tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala deleted file mode 100644 index 40bc6e1a123..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ /dev/null @@ -1,194 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings - -import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats -import com.typesafe.scalalogging.LazyLogging -import play.api.http.Status.CONFLICT -import net.liftweb.common.Box -import play.api.i18n.MessagesProvider -import play.api.libs.json._ -import scalapb.{GeneratedMessage, GeneratedMessageCompanion} - -import java.util.UUID -import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ - -object TracingIds { - val dummyTracingId: String = "dummyTracingId" -} - -trait TracingService[T <: GeneratedMessage] - extends KeyValueStoreImplicits - with FoxImplicits - with LazyLogging - with ColorGenerator - with BoundingBoxMerger { - - implicit val ec: ExecutionContext - - def tracingType: TracingType - - def tracingStore: FossilDBClient - - def temporaryTracingStore: TemporaryTracingStore[T] - - def temporaryTracingIdStore: TracingStoreRedisStore - - def tracingMigrationService: TracingMigrationService[T] - - def dummyTracing: T - - val handledGroupIdStore: TracingStoreRedisStore - - val uncommittedUpdatesStore: TracingStoreRedisStore - - implicit def tracingCompanion: GeneratedMessageCompanion[T] - - implicit val updateActionJsonFormat: Format[UpdateAction[T]] - - // this should be longer than maxCacheTime in webknossos/AnnotationStore - // so that the references saved there remain valid throughout their life - private val temporaryStoreTimeout = 70 minutes - - // the information that a tracing is/was temporary needs to be stored longer - // to provide useful error 
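// Editor's sketch (illustrative, not part of this diff): with the (T, Boolean) "hasChanged"
// bookkeeping removed, a migration is now a plain T => Fox[T] and migrateTracing folds the
// list in order, aborting on the first failure. Adding a (hypothetical) second migration
// would just extend the list:
private def someOtherMigration(tracing: SkeletonTracing): Fox[SkeletonTracing] =
  Fox.successful(tracing) // placeholder: transform the tracing here

override protected val migrations: List[SkeletonTracing => Fox[SkeletonTracing]] =
  List(removeSingleUserBoundingBox, someOtherMigration) // applied left to right by migrateIter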
messages to the user if the temporary tracing is no longer present - private val temporaryIdStoreTimeout = 10 days - - private val handledGroupCacheExpiry: FiniteDuration = 24 hours - - def currentVersion(tracingId: String): Fox[Long] - - def currentVersion(tracing: T): Long - - private def transactionGroupKey(tracingId: String, transactionId: String, transactionGroupIndex: Int, version: Long) = - s"transactionGroup___${tracingId}___${transactionId}___${transactionGroupIndex}___$version" - - protected def temporaryIdKey(tracingId: String) = - s"temporaryTracingId___$tracingId" - - private def patternFor(tracingId: String, transactionId: String) = - s"transactionGroup___${tracingId}___${transactionId}___*" - - def saveUncommitted(tracingId: String, - transactionId: String, - transactionGroupIndex: Int, - version: Long, - updateGroup: UpdateActionGroup[T], - expiry: FiniteDuration): Fox[Unit] = - for { - _ <- Fox.runIf(transactionGroupIndex > 0)( - Fox.assertTrue( - uncommittedUpdatesStore.contains(transactionGroupKey( - tracingId, - transactionId, - transactionGroupIndex - 1, - version))) ?~> s"Incorrect transaction index. Got: $transactionGroupIndex but ${transactionGroupIndex - 1} does not exist" ~> CONFLICT) - _ <- uncommittedUpdatesStore.insert(transactionGroupKey(tracingId, transactionId, transactionGroupIndex, version), - Json.toJson(updateGroup).toString(), - Some(expiry)) - } yield () - - def getAllUncommittedFor(tracingId: String, transactionId: String): Fox[List[UpdateActionGroup[T]]] = - for { - raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(tracingId, transactionId)) - parsed: Seq[UpdateActionGroup[T]] = raw.flatMap(itemAsString => - JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup[T]])) - } yield parsed.toList.sortBy(_.transactionGroupIndex) - - def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = - uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) - - private def migrateTracing(tracingFox: Fox[T], tracingId: String): Fox[T] = - tracingMigrationService.migrateTracing(tracingFox).flatMap { - case (tracing, hasChanged) => - if (hasChanged) - save(tracing, Some(tracingId), currentVersion(tracing)).map(_ => tracing) - else - Fox.successful(tracing) - } - - def handleUpdateGroup(tracingId: String, - updateGroup: UpdateActionGroup[T], - previousVersion: Long, - userToken: Option[String]): Fox[_] - - def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) - - def find(tracingId: String, - version: Option[Long] = None, - useCache: Boolean = true, - applyUpdates: Boolean = false): Fox[T] = - if (tracingId == TracingIds.dummyTracingId) - Fox.successful(dummyTracing) - else { - val tracingFox = tracingStore.get(tracingId, version)(fromProtoBytes[T]).map(_.value) - tracingFox.flatMap { tracing => - val updatedTracing = if (applyUpdates) { - applyPendingUpdates(tracing, tracingId, version) - } else { - Fox.successful(tracing) - } - migrateTracing(updatedTracing, tracingId) - }.orElse { - if (useCache) - temporaryTracingStore.find(tracingId) - else - tracingFox - } - } - - def findMultiple(selectors: List[Option[TracingSelector]], - useCache: Boolean = true, - applyUpdates: Boolean = false): Fox[List[Option[T]]] = - Fox.combined { - selectors.map { - case Some(selector) => find(selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) - case None => Fox.successful(None) - } - } - - def 
generateTracingId: String = UUID.randomUUID.toString - - def save(tracing: T, tracingId: Option[String], version: Long, toCache: Boolean = false): Fox[String] = { - val id = tracingId.getOrElse(generateTracingId) - if (toCache) { - temporaryTracingStore.insert(id, tracing, Some(temporaryStoreTimeout)) - temporaryTracingIdStore.insert(temporaryIdKey(id), "", Some(temporaryIdStoreTimeout)) - Fox.successful(id) - } else { - tracingStore.put(id, version, tracing).map(_ => id) - } - } - - private def handledGroupKey(tracingId: String, transactionId: String, version: Long, transactionGroupIndex: Int) = - s"handledGroup___${tracingId}___${transactionId}___${version}___$transactionGroupIndex" - - def saveToHandledGroupIdStore(tracingId: String, - transactionId: String, - version: Long, - transactionGroupIndex: Int): Fox[Unit] = { - val key = handledGroupKey(tracingId, transactionId, version, transactionGroupIndex) - handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) - } - - def handledGroupIdStoreContains(tracingId: String, - transactionId: String, - version: Long, - transactionGroupIndex: Int): Fox[Boolean] = - handledGroupIdStore.contains(handledGroupKey(tracingId, transactionId, version, transactionGroupIndex)) - - def merge(tracings: Seq[T], mergedVolumeStats: MergedVolumeStats, newEditableMappingIdOpt: Option[String]): Box[T] - - def remapTooLargeTreeIds(tracing: T): T = tracing - - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[T], - newId: String, - newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] - - def mergeEditableMappings(tracingsWithIds: List[(T, String)], userToken: Option[String]): Fox[String] -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala deleted file mode 100644 index aebd371ae76..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala +++ /dev/null @@ -1,91 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings - -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import play.api.libs.json._ -import scalapb.GeneratedMessage - -trait UpdateAction[T <: GeneratedMessage] { - - def actionTimestamp: Option[Long] - - def actionAuthorId: Option[String] - - def applyOn(tracing: T): T = tracing - - def addTimestamp(timestamp: Long): UpdateAction[T] = this - - def addInfo(info: Option[String]): UpdateAction[T] = this - - def addAuthorId(authorId: Option[String]): UpdateAction[T] = this - - def transformToCompact: UpdateAction[T] = this - - // For analytics we wan to know how many changes are view only (e.g. 
move camera, toggle tree visibility) - // Overridden in subclasses - def isViewOnlyChange: Boolean = false -} - -object UpdateAction { - type SkeletonUpdateAction = UpdateAction[SkeletonTracing] - type VolumeUpdateAction = UpdateAction[VolumeTracing] -} - -case class UpdateActionGroup[T <: GeneratedMessage]( - version: Long, - timestamp: Long, - authorId: Option[String], - actions: List[UpdateAction[T]], - stats: Option[JsObject], - info: Option[String], - transactionId: String, - transactionGroupCount: Int, - transactionGroupIndex: Int -) { - def significantChangesCount: Int = actions.count(!_.isViewOnlyChange) - def viewChangesCount: Int = actions.count(_.isViewOnlyChange) -} - -object UpdateActionGroup { - - implicit def updateActionGroupReads[T <: GeneratedMessage]( - implicit fmt: Reads[UpdateAction[T]]): Reads[UpdateActionGroup[T]] = - (json: JsValue) => - for { - version <- json.validate((JsPath \ "version").read[Long]) - timestamp <- json.validate((JsPath \ "timestamp").read[Long]) - authorId <- json.validate((JsPath \ "authorId").readNullable[String]) - actions <- json.validate((JsPath \ "actions").read[List[UpdateAction[T]]]) - stats <- json.validate((JsPath \ "stats").readNullable[JsObject]) - info <- json.validate((JsPath \ "info").readNullable[String]) - transactionId <- json.validate((JsPath \ "transactionId").read[String]) - transactionGroupCount <- json.validate((JsPath \ "transactionGroupCount").read[Int]) - transactionGroupIndex <- json.validate((JsPath \ "transactionGroupIndex").read[Int]) - } yield { - UpdateActionGroup[T](version, - timestamp, - authorId, - actions, - stats, - info, - transactionId, - transactionGroupCount, - transactionGroupIndex) - } - - implicit def updateActionGroupWrites[T <: GeneratedMessage]( - implicit fmt: Writes[UpdateAction[T]]): Writes[UpdateActionGroup[T]] = - (value: UpdateActionGroup[T]) => - Json.obj( - "version" -> value.version, - "timestamp" -> value.timestamp, - "authorId" -> value.authorId, - "actions" -> Json.toJson(value.actions), - "stats" -> value.stats, - "info" -> value.info, - "transactionId" -> value.transactionId, - "transactionGroupCount" -> value.transactionGroupCount, - "transactionGroupIndex" -> value.transactionGroupIndex - ) - -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala new file mode 100644 index 00000000000..af9f7a2a287 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala @@ -0,0 +1,18 @@ +package com.scalableminds.webknossos.tracingstore.tracings.editablemapping + +import net.liftweb.common.Box +import net.liftweb.common.Box.tryo + +trait EditableMappingElementKeys { + + protected def agglomerateGraphKey(mappingId: String, agglomerateId: Long): String = + s"$mappingId/$agglomerateId" + + protected def segmentToAgglomerateKey(mappingId: String, chunkId: Long): String = + s"$mappingId/$chunkId" + + protected def chunkIdFromSegmentToAgglomerateKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) + + protected def agglomerateIdFromAgglomerateGraphKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 96d44509897..b1140ca2857 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox @@ -21,6 +22,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ import ucar.ma2.{Array => MultiArray} import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import scala.concurrent.ExecutionContext @@ -29,16 +31,15 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { val bucket: BucketPosition = readInstruction.bucket for { - editableMappingId <- Fox.successful(layer.name) + tracingId <- Fox.successful(layer.name) _ <- bool2Fox(layer.doesContainBucket(bucket)) remoteFallbackLayer <- layer.editableMappingService .remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.tracingId) // called here to ensure updates are applied - (editableMappingInfo, editableMappingVersion) <- layer.editableMappingService.getInfoAndActualVersion( - editableMappingId, - requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer, - userToken = layer.token) + editableMappingInfo <- layer.annotationService.findEditableMappingInfo( + layer.annotationId, + tracingId, + Some(layer.version))(ec, layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, @@ -48,18 +49,17 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP version = None, additionalCoordinates = readInstruction.bucket.additionalCoordinates ) - (unmappedData, indices) <- layer.editableMappingService.getFallbackDataFromDatastore(remoteFallbackLayer, - List(dataRequest), - layer.token) + (unmappedData, indices) <- layer.editableMappingService + .getFallbackDataFromDatastore(remoteFallbackLayer, List(dataRequest))(ec, layer.tokenContext) _ <- bool2Fox(indices.isEmpty) unmappedDataTyped <- layer.editableMappingService.bytesToUnsignedInt(unmappedData, layer.tracing.elementClass) segmentIds = layer.editableMappingService.collectSegmentIds(unmappedDataTyped) - relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds(segmentIds, - editableMappingInfo, - editableMappingVersion, - editableMappingId, - remoteFallbackLayer, - layer.token) + relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds( + segmentIds, + editableMappingInfo, + layer.version, + tracingId, + remoteFallbackLayer)(layer.tokenContext) mappedData: Array[Byte] <- layer.editableMappingService.mapData(unmappedDataTyped, relevantMapping, layer.elementClass) @@ -72,9 +72,11 @@ case class EditableMappingLayer(name: String, resolutions: 
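// Editor's note (illustrative, not part of this diff): the helpers in EditableMappingElementKeys
// above encode both element kinds as "<tracingId>/<longId>", so the parsers take index 1 after
// splitting on '/'; this relies on tracing ids (UUIDs) never containing a slash. For example:
agglomerateGraphKey("someTracingId", 42L) // evaluates to "someTracingId/42"
agglomerateIdFromAgglomerateGraphKey("someTracingId/42") // evaluates to Full(42L)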
List[Vec3Int], largestSegmentId: Option[Long], elementClass: ElementClass.Value, - token: Option[String], + tokenContext: TokenContext, tracing: VolumeTracing, + annotationId: String, tracingId: String, + annotationService: TSAnnotationService, editableMappingService: EditableMappingService) extends SegmentationLayer { override val mags: List[MagLocator] = List.empty // MagLocators do not apply for annotation layers @@ -90,7 +92,7 @@ case class EditableMappingLayer(name: String, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new EditableMappingBucketProvider(layer = this) - override def bucketProviderCacheKey: String = s"$name-token=$token" + override def bucketProviderCacheKey: String = s"$name-token=${tokenContext.userTokenOpt}" override def mappings: Option[Set[String]] = None @@ -99,4 +101,6 @@ case class EditableMappingLayer(name: String, override def adminViewConfiguration: Option[LayerViewConfiguration] = None override def additionalAxes: Option[Seq[AdditionalAxis]] = None + + def version: Long = tracing.version } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 2ec9607a77c..251e9fc509a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -1,13 +1,14 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo -import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateProto +import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{Edge, Tree, TreeTypeProto} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto @@ -21,6 +22,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, @@ -34,11 +36,10 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo import org.jgrapht.alg.flow.PushRelabelMFImpl import org.jgrapht.graph.{DefaultWeightedEdge, SimpleWeightedGraph} -import play.api.libs.json.{JsObject, JsValue, Json, OFormat} +import play.api.libs.json.{JsObject, Json, OFormat} import java.nio.file.Paths import java.util -import java.util.UUID import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.jdk.CollectionConverters.CollectionHasAsScala @@ -53,15 +54,14 @@ case class MinCutParameters( segmentId1: Long, 
segmentId2: Long, mag: Vec3Int, - agglomerateId: Long, - editableMappingId: String + agglomerateId: Long ) object MinCutParameters { implicit val jsonFormat: OFormat[MinCutParameters] = Json.format[MinCutParameters] } -case class NeighborsParameters(segmentId: Long, mag: Vec3Int, agglomerateId: Long, editableMappingId: String) +case class NeighborsParameters(segmentId: Long, mag: Vec3Int, agglomerateId: Long) object NeighborsParameters { implicit val jsonFormat: OFormat[NeighborsParameters] = Json.format[NeighborsParameters] @@ -96,84 +96,49 @@ class EditableMappingService @Inject()( extends KeyValueStoreImplicits with FallbackDataHelper with FoxImplicits + with ReversionHelper + with EditableMappingElementKeys with LazyLogging with ProtoGeometryImplicits { val defaultSegmentToAgglomerateChunkSize: Int = 64 * 1024 // max. 1 MiB chunks (two 8-byte numbers per element) - private def generateId: String = UUID.randomUUID.toString - val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None) + adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) - private lazy val segmentToAgglomerateChunkCache: AlfuCache[(String, Long, Long), Seq[(Long, Long)]] = AlfuCache() private lazy val agglomerateToGraphCache: AlfuCache[(String, Long, Long), AgglomerateGraph] = AlfuCache(maxCapacity = 50) - def infoJson(tracingId: String, - editableMappingInfo: EditableMappingInfo, - editableMappingId: String, - version: Option[Long]): Fox[JsObject] = - for { - version <- getClosestMaterializableVersionOrZero(editableMappingId, version) - } yield - Json.obj( - "mappingName" -> editableMappingId, - "version" -> version, - "tracingId" -> tracingId, - "baseMappingName" -> editableMappingInfo.baseMappingName, - "largestAgglomerateId" -> editableMappingInfo.largestAgglomerateId, - "createdTimestamp" -> editableMappingInfo.createdTimestamp - ) + def infoJson(tracingId: String, editableMappingInfo: EditableMappingInfo): JsObject = + Json.obj( + "tracingId" -> tracingId, + "baseMappingName" -> editableMappingInfo.baseMappingName, + "largestAgglomerateId" -> editableMappingInfo.largestAgglomerateId, + "createdTimestamp" -> editableMappingInfo.createdTimestamp + ) - def create(baseMappingName: String): Fox[(String, EditableMappingInfo)] = { - val newId = generateId - val newEditableMappingInfo = EditableMappingInfo( + def create(baseMappingName: String): EditableMappingInfo = + EditableMappingInfo( baseMappingName = baseMappingName, createdTimestamp = Instant.now.epochMillis, largestAgglomerateId = 0L ) - for { - _ <- tracingDataStore.editableMappingsInfo.put(newId, 0L, toProtoBytes(newEditableMappingInfo)) - } yield (newId, newEditableMappingInfo) - } - def duplicate(editableMappingIdOpt: Option[String], - version: Option[Long], - remoteFallbackLayerBox: Box[RemoteFallbackLayer], - userToken: Option[String]): Fox[String] = - for { - editableMappingId <- editableMappingIdOpt ?~> "duplicate on editable mapping without id" - remoteFallbackLayer <- remoteFallbackLayerBox ?~> "duplicate on editable mapping without remote fallback layer" - editableMappingInfoAndVersion <- getInfoAndActualVersion(editableMappingId, - version, - remoteFallbackLayer, - userToken) - newIdAndInfoV0 <- create(editableMappingInfoAndVersion._1.baseMappingName) - newId = 
newIdAndInfoV0._1 - newVersion = editableMappingInfoAndVersion._2 - _ <- tracingDataStore.editableMappingsInfo.put(newId, newVersion, toProtoBytes(editableMappingInfoAndVersion._1)) - _ <- duplicateSegmentToAgglomerate(editableMappingId, newId, newVersion) - _ <- duplicateAgglomerateToGraph(editableMappingId, newId, newVersion) - updateActionsWithVersions <- getUpdateActionsWithVersions(editableMappingId, editableMappingInfoAndVersion._2, 0L) - _ <- Fox.serialCombined(updateActionsWithVersions) { - updateActionsWithVersion: (Long, List[EditableMappingUpdateAction]) => - tracingDataStore.editableMappingUpdates.put(newId, updateActionsWithVersion._1, updateActionsWithVersion._2) - } - } yield newId - - private def duplicateSegmentToAgglomerate(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { - val iterator = - new VersionedFossilDbIterator(editableMappingId, + def duplicateSegmentToAgglomerate(sourceTracingId: String, + newId: String, + sourceVersion: Long, + newVersion: Long): Fox[Unit] = { + val sourceIterator = + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsSegmentToAgglomerate, - Some(newVersion)) + Some(sourceVersion)) for { - _ <- Fox.combined(iterator.map { keyValuePair => + _ <- Fox.combined(sourceIterator.map { keyValuePair => for { chunkId <- chunkIdFromSegmentToAgglomerateKey(keyValuePair.key).toFox newKey = segmentToAgglomerateKey(newId, chunkId) @@ -185,13 +150,16 @@ class EditableMappingService @Inject()( } yield () } - private def duplicateAgglomerateToGraph(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { - val iterator = - new VersionedFossilDbIterator(editableMappingId, + def duplicateAgglomerateToGraph(sourceTracingId: String, + newId: String, + sourceVersion: Long, + newVersion: Long): Fox[Unit] = { + val sourceIterator = + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsAgglomerateToGraph, - Some(newVersion)) + Some(sourceVersion)) for { - _ <- Fox.combined(iterator.map { keyValuePair => + _ <- Fox.combined(sourceIterator.map { keyValuePair => for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(keyValuePair.key).toFox newKey = agglomerateGraphKey(newId, agglomerateId) @@ -201,220 +169,67 @@ class EditableMappingService @Inject()( } yield () } - def updateActionLog(editableMappingId: String): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[EditableMappingUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - - for { - updates <- tracingDataStore.editableMappingUpdates.getMultipleVersionsAsVersionValueTuple(editableMappingId)( - fromJsonBytes[List[EditableMappingUpdateAction]]) - updateActionGroupsJs = updates.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - - def getInfo(editableMappingId: String, - version: Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[EditableMappingInfo] = - for { - (info, _) <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer, userToken) - } yield info - - def getBaseMappingName(editableMappingId: String): Fox[Option[String]] = - for { - desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, None) - infoBox <- getClosestMaterialized(editableMappingId, desiredVersion).futureBox - } yield - infoBox match { - case Full(info) => Some(info.value.baseMappingName) - case _ => None - } - - def getInfoAndActualVersion(editableMappingId: String, - 
requestedVersion: Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[(EditableMappingInfo, Long)] = - for { - desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, requestedVersion) - materializedInfo <- materializedInfoCache.getOrLoad( - (editableMappingId, desiredVersion), - _ => applyPendingUpdates(editableMappingId, desiredVersion, remoteFallbackLayer, userToken)) - } yield (materializedInfo, desiredVersion) - - def update(editableMappingId: String, - updateActionGroup: EditableMappingUpdateActionGroup, - newVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = - for { - actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) - _ <- dryApplyUpdates(editableMappingId, newVersion, actionsWithTimestamp, remoteFallbackLayer, userToken) ?~> "editableMapping.dryUpdate.failed" - _ <- tracingDataStore.editableMappingUpdates.put(editableMappingId, newVersion, actionsWithTimestamp) - } yield () - - private def dryApplyUpdates(editableMappingId: String, - newVersion: Long, - updates: List[EditableMappingUpdateAction], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = - for { - (previousInfo, previousVersion) <- getInfoAndActualVersion(editableMappingId, - None, - remoteFallbackLayer, - userToken) - updater = new EditableMappingUpdater( - editableMappingId, - previousInfo.baseMappingName, - previousVersion, - newVersion, - remoteFallbackLayer, - userToken, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = updates.length <= 1 - ) - updated <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" - } yield () - - def applyPendingUpdates(editableMappingId: String, - desiredVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[EditableMappingInfo] = - for { - closestMaterializedWithVersion <- getClosestMaterialized(editableMappingId, desiredVersion) - updatedEditableMappingInfo: EditableMappingInfo <- if (desiredVersion == closestMaterializedWithVersion.version) - Fox.successful(closestMaterializedWithVersion.value) - else - for { - pendingUpdates <- getPendingUpdates(editableMappingId, closestMaterializedWithVersion.version, desiredVersion) - updater = new EditableMappingUpdater( - editableMappingId, - closestMaterializedWithVersion.value.baseMappingName, - closestMaterializedWithVersion.version, - desiredVersion, - remoteFallbackLayer, - userToken, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = pendingUpdates.length <= 1 - ) - - updated <- updater.applyUpdatesAndSave(closestMaterializedWithVersion.value, pendingUpdates) - } yield updated - } yield updatedEditableMappingInfo - - private def getClosestMaterialized(editableMappingId: String, - desiredVersion: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = - tracingDataStore.editableMappingsInfo.get(editableMappingId, version = Some(desiredVersion))( - fromProtoBytes[EditableMappingInfo]) - - def getClosestMaterializableVersionOrZero(editableMappingId: String, desiredVersion: Option[Long]): Fox[Long] = - tracingDataStore.editableMappingUpdates.getVersion(editableMappingId, - version = desiredVersion, - mayBeEmpty = Some(true), - emptyFallback = Some(0L)) - - private def getPendingUpdates(editableMappingId: String, - closestMaterializedVersion: Long, - closestMaterializableVersion: Long): 
Fox[List[EditableMappingUpdateAction]] = - if (closestMaterializableVersion == closestMaterializedVersion) { - Fox.successful(List.empty) - } else { - for { - updates <- getUpdateActionsWithVersions(editableMappingId, - newestVersion = closestMaterializableVersion, - oldestVersion = closestMaterializedVersion + 1L) - } yield updates.map(_._2).reverse.flatten - } - - private def getUpdateActionsWithVersions( - editableMappingId: String, - newestVersion: Long, - oldestVersion: Long): Fox[List[(Long, List[EditableMappingUpdateAction])]] = { - val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) - for { - updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => - val batchFrom = batchRange._1 - val batchTo = batchRange._2 - for { - res <- tracingDataStore.editableMappingUpdates - .getMultipleVersionsAsVersionValueTuple[List[EditableMappingUpdateAction]]( - editableMappingId, - Some(batchTo), - Some(batchFrom) - )(fromJsonBytes[List[EditableMappingUpdateAction]]) - } yield res - } - flat = updateActionBatches.flatten - } yield flat - } + def assertTracingHasEditableMapping(tracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = + bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], segmentIdOpt: Option[Long], - mag: Vec3Int, - userToken: Option[String]): Fox[Long] = + mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = segmentIdOpt match { case Some(segmentId) => Fox.successful(segmentId) - case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag, userToken) + case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag) } private def findSegmentIdAtPosition(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], - mag: Vec3Int, - userToken: Option[String]): Fox[Long] = + mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = for { pos <- positionOpt.toFox ?~> "segment id or position is required in editable mapping action" - voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(userToken, remoteFallbackLayer, pos, mag) + voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(remoteFallbackLayer, pos, mag) voxelAsLongArray: Array[Long] <- bytesToLongs(voxelAsBytes, remoteFallbackLayer.elementClass) _ <- Fox.bool2Fox(voxelAsLongArray.length == 1) ?~> s"Expected one, got ${voxelAsLongArray.length} segment id values for voxel." 
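// Editor's note (illustrative, not part of this diff): the segment-to-agglomerate lookup below is
// chunked. With defaultSegmentToAgglomerateChunkSize = 64 * 1024 (defined above), segment ids map
// to chunk ids by integer division, so ids 0..65535 share chunk 0, 65536..131071 chunk 1, and so on:
val chunkId = segmentId / defaultSegmentToAgglomerateChunkSize // e.g. 70000 / 65536 == 1
// the chunk is then fetched under segmentToAgglomerateKey(tracingId, chunkId)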
voxelAsLong <- voxelAsLongArray.headOption } yield voxelAsLong - def volumeData(tracing: VolumeTracing, - tracingId: String, - dataRequests: DataRequestCollection, - userToken: Option[String]): Fox[(Array[Byte], List[Int])] = - for { - editableMappingId <- tracing.mappingName.toFox - dataLayer = editableMappingLayer(editableMappingId, tracing, tracingId, userToken) - requests = dataRequests.map(r => - DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) - data <- binaryDataService.handleDataRequests(requests) - } yield data + def volumeData(editableMappingLayer: EditableMappingLayer, + dataRequests: DataRequestCollection): Fox[(Array[Byte], List[Int])] = { + val requests = dataRequests.map( + r => + DataServiceDataRequest(null, + editableMappingLayer, + r.cuboid(editableMappingLayer), + r.settings.copy(appliedAgglomerate = None))) + binaryDataService.handleDataRequests(requests) + } private def getSegmentToAgglomerateForSegmentIds(segmentIds: Set[Long], - editableMappingId: String, + tracingId: String, version: Long): Fox[Map[Long, Long]] = { val chunkIds = segmentIds.map(_ / defaultSegmentToAgglomerateChunkSize) for { maps: List[Seq[(Long, Long)]] <- Fox.serialCombined(chunkIds.toList)(chunkId => - getSegmentToAgglomerateChunkFiltered(editableMappingId, chunkId, version, segmentIds)) + getSegmentToAgglomerateChunkFiltered(tracingId, chunkId, version, segmentIds)) } yield maps.flatten.toMap } - private def getSegmentToAgglomerateChunkFiltered(editableMappingId: String, + private def getSegmentToAgglomerateChunkFiltered(tracingId: String, chunkId: Long, version: Long, segmentIds: Set[Long]): Fox[Seq[(Long, Long)]] = for { - segmentToAgglomerateChunk <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version) + segmentToAgglomerateChunk <- getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version) filtered = segmentToAgglomerateChunk.filter(pair => segmentIds.contains(pair._1)) } yield filtered - def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, + def getSegmentToAgglomerateChunkWithEmptyFallback(tracingId: String, chunkId: Long, version: Long): Fox[Seq[(Long, Long)]] = segmentToAgglomerateChunkCache.getOrLoad( - (editableMappingId, chunkId, version), + (tracingId, chunkId, version), _ => for { - chunkBox: Box[Seq[(Long, Long)]] <- getSegmentToAgglomerateChunk(editableMappingId, chunkId, Some(version)).futureBox + chunkBox: Box[Seq[(Long, Long)]] <- getSegmentToAgglomerateChunk(tracingId, chunkId, Some(version)).futureBox segmentToAgglomerate <- chunkBox match { case Full(chunk) => Fox.successful(chunk) case Empty => Fox.successful(Seq.empty[(Long, Long)]) @@ -423,57 +238,58 @@ class EditableMappingService @Inject()( } yield segmentToAgglomerate ) - private def getSegmentToAgglomerateChunk(editableMappingId: String, + private def getSegmentToAgglomerateChunk(tracingId: String, chunkId: Long, - version: Option[Long]): Fox[Seq[(Long, Long)]] = + version: Option[Long]): Fox[Seq[(Long, Long)]] = { + val chunkKey = segmentToAgglomerateKey(tracingId, chunkId) + getSegmentToAgglomerateChunk(chunkKey, version) + } + + def getSegmentToAgglomerateChunk(chunkKey: String, version: Option[Long]): Fox[Seq[(Long, Long)]] = for { - keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate - .get(segmentToAgglomerateKey(editableMappingId, chunkId), version, mayBeEmpty = Some(true))( - fromProtoBytes[SegmentToAgglomerateProto]) 
- valueProto = keyValuePair.value + keyValuePairBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsSegmentToAgglomerate + .get(chunkKey, version, mayBeEmpty = Some(true)) + valueProto <- if (isRevertedElement(keyValuePairBytes.value)) Fox.empty + else fromProtoBytes[SegmentToAgglomerateChunkProto](keyValuePairBytes.value).toFox asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) } yield asSequence - def generateCombinedMappingForSegmentIds(segmentIds: Set[Long], - editableMapping: EditableMappingInfo, - editableMappingVersion: Long, - editableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Map[Long, Long]] = + def generateCombinedMappingForSegmentIds( + segmentIds: Set[Long], + editableMapping: EditableMappingInfo, + editableMappingVersion: Long, + tracingId: String, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = for { editableMappingForSegmentIds <- getSegmentToAgglomerateForSegmentIds(segmentIds, - editableMappingId, + tracingId, editableMappingVersion) segmentIdsInEditableMapping: Set[Long] = editableMappingForSegmentIds.keySet segmentIdsInBaseMapping: Set[Long] = segmentIds.diff(segmentIdsInEditableMapping) baseMappingSubset <- getBaseSegmentToAgglomerate(editableMapping.baseMappingName, segmentIdsInBaseMapping, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) } yield editableMappingForSegmentIds ++ baseMappingSubset - def getAgglomerateSkeletonWithFallback(editableMappingId: String, + def getAgglomerateSkeletonWithFallback(tracingId: String, + version: Long, + editableMappingInfo: EditableMappingInfo, remoteFallbackLayer: RemoteFallbackLayer, - agglomerateId: Long, - userToken: Option[String]): Fox[Array[Byte]] = + agglomerateId: Long)(implicit tc: TokenContext): Fox[Array[Byte]] = for { - // called here to ensure updates are applied - editableMappingInfo <- getInfo(editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer, userToken).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => - Fox.successful( - agglomerateGraphToSkeleton(editableMappingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) + Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) case Empty => - remoteDatastoreClient.getAgglomerateSkeleton(userToken, - remoteFallbackLayer, + remoteDatastoreClient.getAgglomerateSkeleton(remoteFallbackLayer, editableMappingInfo.baseMappingName, agglomerateId) case f: Failure => f.toFox } } yield skeletonBytes - private def agglomerateGraphToSkeleton(editableMappingId: String, + private def agglomerateGraphToSkeleton(tracingId: String, graph: AgglomerateGraph, remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long): Array[Byte] = { @@ -497,7 +313,7 @@ class EditableMappingService @Inject()( createdTimestamp = System.currentTimeMillis(), nodes = nodes, edges = skeletonEdges, - name = s"agglomerate $agglomerateId ($editableMappingId)", + name = s"agglomerate $agglomerateId ($tracingId)", `type` = Some(TreeTypeProto.AGGLOMERATE) )) @@ -508,16 +324,15 @@ class EditableMappingService @Inject()( skeleton.toByteArray } - def getBaseSegmentToAgglomerate(mappingName: String, - segmentIds: Set[Long], - 
remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Map[Long, Long]] = { + def getBaseSegmentToAgglomerate( + baseMappingName: String, + segmentIds: Set[Long], + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = { val segmentIdsOrdered = segmentIds.toList for { agglomerateIdsOrdered <- remoteDatastoreClient.getAgglomerateIdsForSegmentIds(remoteFallbackLayer, - mappingName, - segmentIdsOrdered, - userToken) + baseMappingName, + segmentIdsOrdered) } yield segmentIdsOrdered.zip(agglomerateIdsOrdered).toMap } @@ -554,105 +369,64 @@ class EditableMappingService @Inject()( bytes = UnsignedIntegerArray.toByteArray(unsignedIntArray, elementClass) } yield bytes - private def editableMappingLayer(mappingName: String, - tracing: VolumeTracing, - tracingId: String, - userToken: Option[String]): EditableMappingLayer = - EditableMappingLayer( - mappingName, - tracing.boundingBox, - resolutions = tracing.mags.map(vec3IntFromProto).toList, - largestSegmentId = Some(0L), - elementClass = tracing.elementClass, - userToken, - tracing = tracing, - tracingId = tracingId, - editableMappingService = this + def createAdHocMesh(editableMappingLayer: EditableMappingLayer, + request: WebknossosAdHocMeshRequest): Fox[(Array[Float], List[Int])] = { + val adHocMeshRequest = AdHocMeshRequest( + dataSource = None, + dataLayer = editableMappingLayer, + cuboid = request.cuboid(editableMappingLayer), + segmentId = request.segmentId, + voxelSizeFactor = request.voxelSizeFactorInUnit, + mapping = None, + mappingType = None, + findNeighbors = request.findNeighbors ) + adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } - def createAdHocMesh(tracing: VolumeTracing, - tracingId: String, - request: WebknossosAdHocMeshRequest, - userToken: Option[String]): Fox[(Array[Float], List[Int])] = - for { - mappingName <- tracing.mappingName.toFox - segmentationLayer = editableMappingLayer(mappingName, tracing, tracingId, userToken) - adHocMeshRequest = AdHocMeshRequest( - dataSource = None, - dataLayer = segmentationLayer, - cuboid = request.cuboid(segmentationLayer), - segmentId = request.segmentId, - voxelSizeFactor = request.voxelSizeFactorInUnit, - mapping = None, - mappingType = None, - findNeighbors = request.findNeighbors - ) - result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) - } yield result - - def agglomerateGraphKey(mappingId: String, agglomerateId: Long): String = - s"$mappingId/$agglomerateId" - - def segmentToAgglomerateKey(mappingId: String, chunkId: Long): String = - s"$mappingId/$chunkId" - - private def chunkIdFromSegmentToAgglomerateKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) - - private def agglomerateIdFromAgglomerateGraphKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) - - def getAgglomerateGraphForId(mappingId: String, - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String], - requestedVersion: Option[Long] = None): Fox[AgglomerateGraph] = + def getAgglomerateGraphForId(tracingId: String, version: Long, agglomerateId: Long): Fox[AgglomerateGraph] = for { - // called here to ensure updates are applied - (_, version) <- getInfoAndActualVersion(mappingId, requestedVersion, remoteFallbackLayer, userToken) agglomerateGraph <- agglomerateToGraphCache.getOrLoad( - (mappingId, agglomerateId, version), + (tracingId, agglomerateId, version), _ => - tracingDataStore.editableMappingsAgglomerateToGraph - .get(agglomerateGraphKey(mappingId, agglomerateId), 
Some(version), mayBeEmpty = Some(true))( - fromProtoBytes[AgglomerateGraph]) - .map(_.value) + for { + graphBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsAgglomerateToGraph + .get(agglomerateGraphKey(tracingId, agglomerateId), Some(version), mayBeEmpty = Some(true)) + graphParsed <- if (isRevertedElement(graphBytes.value)) Fox.empty + else fromProtoBytes[AgglomerateGraph](graphBytes.value).toFox + } yield graphParsed ) } yield agglomerateGraph - def getAgglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, - editableMappingId: String, - version: Option[Long], - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[AgglomerateGraph] = + def getAgglomerateGraphForIdWithFallback( + mapping: EditableMappingInfo, + tracingId: String, + version: Long, + agglomerateId: Long, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = for { - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, - agglomerateId, - remoteFallbackLayer, - userToken, - version).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId).futureBox agglomerateGraph <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph) case Empty => - remoteDatastoreClient.getAgglomerateGraph(remoteFallbackLayer, - mapping.baseMappingName, - agglomerateId, - userToken) + remoteDatastoreClient.getAgglomerateGraph(remoteFallbackLayer, mapping.baseMappingName, agglomerateId) case f: Failure => f.toFox } } yield agglomerateGraph - def agglomerateGraphMinCut(parameters: MinCutParameters, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[List[EdgeWithPositions]] = + def agglomerateGraphMinCut( + tracingId: String, + version: Long, + editableMappingInfo: EditableMappingInfo, + parameters: MinCutParameters, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = for { // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, - parameters.editableMappingId, - None, + agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, + tracingId, + version, parameters.agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) ?~> "getAgglomerateGraph.failed" edgesToCut <- minCut(agglomerateGraph, parameters.segmentId1, parameters.segmentId2) ?~> "Could not calculate min-cut on agglomerate graph." 
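// Editor's aside — an illustrative sketch (not part of this diff) of the graph operation behind
// the minCut step above: once the returned edges are cut, the agglomerate decomposes into the
// connected component reachable from segmentId1 plus the remainder (cf. splitGraph and
// computeConnectedComponent further below). A plain undirected edge list stands in for the
// AgglomerateGraph proto here.
import scala.collection.mutable

object AgglomerateSplitSketch {
  // BFS over an undirected edge list, collecting everything reachable from startNode
  def connectedComponent(startNode: Long, edges: Seq[(Long, Long)]): Set[Long] = {
    val neighbors = mutable.Map.empty[Long, List[Long]]
    edges.foreach {
      case (a, b) =>
        neighbors(a) = b :: neighbors.getOrElse(a, Nil)
        neighbors(b) = a :: neighbors.getOrElse(b, Nil)
    }
    val visited = mutable.Set(startNode)
    val queue = mutable.Queue(startNode)
    while (queue.nonEmpty) {
      val node = queue.dequeue()
      neighbors.getOrElse(node, Nil).foreach(n => if (visited.add(n)) queue.enqueue(n))
    }
    visited.toSet
  }

  def main(args: Array[String]): Unit = {
    val edges = Seq((1L, 2L), (2L, 3L), (3L, 4L))
    val cutEdge = (2L, 3L) // pretend the min-cut between segments 1 and 4 returned this edge
    val afterCut = edges.filterNot(_ == cutEdge)
    println(connectedComponent(1L, afterCut)) // Set(1, 2); segments 3 and 4 form the new agglomerate
  }
}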
edgesWithPositions = annotateEdgesWithPositions(edgesToCut, agglomerateGraph) } yield edgesWithPositions @@ -709,18 +483,18 @@ class EditableMappingService @Inject()( ) } - def agglomerateGraphNeighbors(parameters: NeighborsParameters, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[(Long, Seq[NodeWithPosition])] = + def agglomerateGraphNeighbors( + tracingId: String, + editableMappingInfo: EditableMappingInfo, + version: Long, + parameters: NeighborsParameters, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = for { - // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, - parameters.editableMappingId, - None, + agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, + tracingId, + version, parameters.agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) neighborNodes = neighbors(agglomerateGraph, parameters.segmentId) nodesWithPositions = annotateNodesWithPositions(neighborNodes, agglomerateGraph) } yield (parameters.segmentId, nodesWithPositions) @@ -735,56 +509,4 @@ class EditableMappingService @Inject()( neighborNodes } - def merge(editableMappingIds: List[String], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[String] = - for { - firstMappingId <- editableMappingIds.headOption.toFox - before = Instant.now - newMappingId <- duplicate(Some(firstMappingId), version = None, Some(remoteFallbackLayer), userToken) - _ <- Fox.serialCombined(editableMappingIds.tail)(editableMappingId => - mergeInto(newMappingId, editableMappingId, remoteFallbackLayer, userToken)) - _ = logger.info(s"Merging ${editableMappingIds.length} editable mappings took ${Instant.since(before)}") - } yield newMappingId - - // read as: merge source into target (mutate target) - private def mergeInto(targetEditableMappingId: String, - sourceEditableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = - for { - targetNewestVersion <- getClosestMaterializableVersionOrZero(targetEditableMappingId, None) - sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceEditableMappingId, - None, - remoteFallbackLayer, - userToken) - sourceNewestVersion = sourceNewestMaterializedWithVersion._2 - updateActionsWithVersions <- getUpdateActionsWithVersions(sourceEditableMappingId, sourceNewestVersion, 0L) - updateActionsToApply = updateActionsWithVersions.map(_._2).reverse.flatten - updater = new EditableMappingUpdater( - targetEditableMappingId, - sourceNewestMaterializedWithVersion._1.baseMappingName, - targetNewestVersion, - targetNewestVersion + sourceNewestVersion, - remoteFallbackLayer, - userToken, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = false - ) - _ <- updater.applyUpdatesAndSave(sourceNewestMaterializedWithVersion._1, updateActionsToApply) - _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion => - tracingDataStore.editableMappingUpdates.put(targetEditableMappingId, - updateActionsWithVersion._1 + targetNewestVersion, - updateActionsWithVersion._2) - } - } yield () - - private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = - (0L to ((to - from) / batchSize)).map { batchIndex => - val batchFrom = batchIndex * batchSize + from - val batchTo = Math.min(to, 
(batchIndex + 1) * batchSize + from - 1)
- (batchFrom, batchTo)
- }
 }
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala
new file mode 100644
index 00000000000..ec8d865ccf2
--- /dev/null
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala
@@ -0,0 +1,121 @@
+package com.scalableminds.webknossos.tracingstore.tracings.editablemapping
+
+import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph
+import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto
+import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper
+import com.scalableminds.webknossos.tracingstore.tracings.{
+ FossilDBClient,
+ KeyValueStoreImplicits,
+ VersionedKeyValuePair
+}
+import net.liftweb.common.Full
+
+import scala.annotation.tailrec
+
+class VersionedAgglomerateToGraphIterator(prefix: String,
+ agglomerateToGraphDataStore: FossilDBClient,
+ version: Option[Long] = None)
+ extends Iterator[(String, AgglomerateGraph, Long)]
+ with ReversionHelper
+ with KeyValueStoreImplicits {
+ private val batchSize = 64
+
+ private var currentStartAfterKey: Option[String] = None
+ private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext
+ private var nextGraph: Option[VersionedKeyValuePair[AgglomerateGraph]] = None
+
+ private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] =
+ agglomerateToGraphDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator
+
+ private def fetchNextAndSave = {
+ currentBatchIterator = fetchNext
+ currentBatchIterator
+ }
+
+ @tailrec
+ private def getNextNonRevertedGraph: Option[VersionedKeyValuePair[AgglomerateGraph]] =
+ if (currentBatchIterator.hasNext) {
+ val entry = currentBatchIterator.next()
+ currentStartAfterKey = Some(entry.key)
+ val graphParsedBox = fromProtoBytes[AgglomerateGraph](entry.value)
+ graphParsedBox match {
+ case _ if isRevertedElement(entry.value) => getNextNonRevertedGraph
+ case Full(graphParsed) => Some(VersionedKeyValuePair(versionedKey = entry.versionedKey, value = graphParsed))
+ case _ => getNextNonRevertedGraph
+ }
+ } else {
+ if (!fetchNextAndSave.hasNext) None
+ else getNextNonRevertedGraph
+ }
+
+ override def hasNext: Boolean =
+ if (nextGraph.isDefined) true
+ else {
+ nextGraph = getNextNonRevertedGraph
+ nextGraph.isDefined
+ }
+
+ override def next(): (String, AgglomerateGraph, Long) = {
+ val nextRes = nextGraph match {
+ case Some(graph) => graph
+ case None => getNextNonRevertedGraph.get
+ }
+ nextGraph = None
+ (nextRes.key, nextRes.value, nextRes.version)
+ }
+
+}
+
+class VersionedSegmentToAgglomerateChunkIterator(prefix: String,
+ segmentToAgglomerateDataStore: FossilDBClient,
+ version: Option[Long] = None)
+ extends Iterator[(String, SegmentToAgglomerateChunkProto, Long)]
+ with ReversionHelper
+ with KeyValueStoreImplicits {
+ private val batchSize = 64
+
+ private var currentStartAfterKey: Option[String] = None
+ private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext
+ private var nextChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = None
+
+ private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] =
+
segmentToAgglomerateDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator + + private def fetchNextAndSave = { + currentBatchIterator = fetchNext + currentBatchIterator + } + + @tailrec + private def getNextNonRevertedChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = + if (currentBatchIterator.hasNext) { + val chunk = currentBatchIterator.next() + currentStartAfterKey = Some(chunk.key) + val chunkParsedBox = fromProtoBytes[SegmentToAgglomerateChunkProto](chunk.value) + chunkParsedBox match { + case _ if isRevertedElement(chunk.value) => getNextNonRevertedChunk + case Full(chunkParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = chunkParsed)) + case _ => getNextNonRevertedChunk + } + } else { + if (!fetchNextAndSave.hasNext) None + else getNextNonRevertedChunk + } + + override def hasNext: Boolean = + if (nextChunk.isDefined) true + else { + nextChunk = getNextNonRevertedChunk + nextChunk.isDefined + } + + override def next(): (String, SegmentToAgglomerateChunkProto, Long) = { + val nextRes = nextChunk match { + case Some(bucket) => bucket + case None => getNextNonRevertedChunk.get + } + nextChunk = None + (nextRes.key, nextRes.value, nextRes.version) + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 652f13c96d9..9a8dcf07e74 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -1,24 +1,32 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.scalableminds.util.geometry.Vec3Int -import play.api.libs.json.Format.GenericFormat +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import play.api.libs.json._ -trait EditableMappingUpdateAction { - def addTimestamp(timestamp: Long): EditableMappingUpdateAction +trait EditableMappingUpdateAction extends LayerUpdateAction { + override def withActionTracingId(newTracingId: String): EditableMappingUpdateAction } // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. 
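// Editor's aside — a small sketch (not part of this diff) of the compatibility pattern the comment
// above describes: because both the old fields (positions) and the new fields (segment ids) are
// declared as Option, a single Json.format can still read update actions persisted before the
// switch. SimplifiedSplitAction and Position are hypothetical stand-ins for the real
// SplitAgglomerateUpdateAction and Vec3Int.
import play.api.libs.json.{Json, OFormat}

case class Position(x: Int, y: Int, z: Int)
object Position {
  implicit val jsonFormat: OFormat[Position] = Json.format[Position]
}

case class SimplifiedSplitAction(segmentPosition1: Option[Position], segmentId1: Option[Long])
object SimplifiedSplitAction {
  implicit val jsonFormat: OFormat[SimplifiedSplitAction] = Json.format[SimplifiedSplitAction]

  def main(args: Array[String]): Unit = {
    // persisted before the switch: position present, segment id absent
    val oldStyle = Json.parse("""{"segmentPosition1": {"x": 1, "y": 2, "z": 3}}""")
    // persisted after the switch: segment id present, position absent
    val newStyle = Json.parse("""{"segmentId1": 42}""")
    println(oldStyle.validate[SimplifiedSplitAction]) // JsSuccess: Some(Position(1,2,3)), None
    println(newStyle.validate[SimplifiedSplitAction]) // JsSuccess: None, Some(42)
  }
}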
-case class SplitAgglomerateUpdateAction(agglomerateId: Long, +case class SplitAgglomerateUpdateAction(agglomerateId: Long, // Unused, we now look this up by position/segment segmentPosition1: Option[Vec3Int], segmentPosition2: Option[Vec3Int], segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, - actionTimestamp: Option[Long] = None) + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): EditableMappingUpdateAction = + this.copy(actionTracingId = newTracingId) } object SplitAgglomerateUpdateAction { @@ -27,48 +35,26 @@ object SplitAgglomerateUpdateAction { // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. -case class MergeAgglomerateUpdateAction(agglomerateId1: Long, - agglomerateId2: Long, +case class MergeAgglomerateUpdateAction(agglomerateId1: Long, // Unused, we now look this up by position/segment + agglomerateId2: Long, // Unused, we now look this up by position/segment segmentPosition1: Option[Vec3Int], segmentPosition2: Option[Vec3Int], segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, - actionTimestamp: Option[Long] = None) + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): EditableMappingUpdateAction = + this.copy(actionTracingId = newTracingId) } object MergeAgglomerateUpdateAction { implicit val jsonFormat: OFormat[MergeAgglomerateUpdateAction] = Json.format[MergeAgglomerateUpdateAction] } - -object EditableMappingUpdateAction { - - implicit object editableMappingUpdateActionFormat extends Format[EditableMappingUpdateAction] { - override def reads(json: JsValue): JsResult[EditableMappingUpdateAction] = - (json \ "name").validate[String].flatMap { - case "mergeAgglomerate" => (json \ "value").validate[MergeAgglomerateUpdateAction] - case "splitAgglomerate" => (json \ "value").validate[SplitAgglomerateUpdateAction] - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } - - override def writes(o: EditableMappingUpdateAction): JsValue = o match { - case s: SplitAgglomerateUpdateAction => - Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) - case s: MergeAgglomerateUpdateAction => - Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) - } - } - -} - -case class EditableMappingUpdateActionGroup( - version: Long, - timestamp: Long, - actions: List[EditableMappingUpdateAction] -) - -object 
EditableMappingUpdateActionGroup { - implicit val jsonFormat: OFormat[EditableMappingUpdateActionGroup] = Json.format[EditableMappingUpdateActionGroup] -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 2440e17a667..bdce3b46fb7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -1,14 +1,16 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping -import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.{AgglomerateEdge, AgglomerateGraph} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentAgglomeratePair, - SegmentToAgglomerateProto + SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient +import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, RemoteFallbackLayer, @@ -29,54 +31,70 @@ import scala.jdk.CollectionConverters.CollectionHasAsScala // this results in only one version increment in the db per update group class EditableMappingUpdater( - editableMappingId: String, + annotationId: String, + tracingId: String, baseMappingName: String, oldVersion: Long, newVersion: Long, remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String], + tokenContext: TokenContext, remoteDatastoreClient: TSRemoteDatastoreClient, editableMappingService: EditableMappingService, - tracingDataStore: TracingDataStore, - relyOnAgglomerateIds: Boolean // False during merge and in case of multiple actions. 
Then, look up all agglomerate ids at positions
+ annotationService: TSAnnotationService,
+ tracingDataStore: TracingDataStore
 ) extends KeyValueStoreImplicits
+ with ReversionHelper
 with FoxImplicits
+ with EditableMappingElementKeys
 with LazyLogging {
- private val segmentToAgglomerateBuffer: mutable.Map[String, Map[Long, Long]] =
- new mutable.HashMap[String, Map[Long, Long]]()
- private val agglomerateToGraphBuffer: mutable.Map[String, AgglomerateGraph] =
- new mutable.HashMap[String, AgglomerateGraph]()
+ // chunkKey → (Map[segmentId → agglomerateId], isToBeReverted)
+ private val segmentToAgglomerateBuffer: mutable.Map[String, (Map[Long, Long], Boolean)] =
+ new mutable.HashMap[String, (Map[Long, Long], Boolean)]()
+ // agglomerateKey → (agglomerateGraph, isToBeReverted)
+ private val agglomerateToGraphBuffer: mutable.Map[String, (AgglomerateGraph, Boolean)] =
+ new mutable.HashMap[String, (AgglomerateGraph, Boolean)]()
 def applyUpdatesAndSave(existingEditableMappingInfo: EditableMappingInfo,
- updates: List[EditableMappingUpdateAction],
+ updates: List[UpdateAction],
 dry: Boolean = false)(implicit ec: ExecutionContext): Fox[EditableMappingInfo] =
 for {
 updatedEditableMappingInfo: EditableMappingInfo <- updateIter(Some(existingEditableMappingInfo), updates)
- _ <- Fox.runIf(!dry)(flushToFossil(updatedEditableMappingInfo))
+ _ <- Fox.runIf(!dry)(flushBuffersToFossil())
+ _ <- Fox.runIf(!dry)(flushUpdatedInfoToFossil(updatedEditableMappingInfo))
 } yield updatedEditableMappingInfo
- private def flushToFossil(updatedEditableMappingInfo: EditableMappingInfo)(implicit ec: ExecutionContext): Fox[Unit] =
+ def flushBuffersToFossil()(implicit ec: ExecutionContext): Fox[Unit] =
 for {
 _ <- Fox.serialCombined(segmentToAgglomerateBuffer.keys.toList)(flushSegmentToAgglomerateChunk)
 _ <- Fox.serialCombined(agglomerateToGraphBuffer.keys.toList)(flushAgglomerateGraph)
- _ <- tracingDataStore.editableMappingsInfo.put(editableMappingId, newVersion, updatedEditableMappingInfo)
+ } yield ()
+
+ private def flushUpdatedInfoToFossil(updatedEditableMappingInfo: EditableMappingInfo): Fox[Unit] =
+ for {
+ _ <- tracingDataStore.editableMappingsInfo.put(tracingId, newVersion, updatedEditableMappingInfo)
 } yield ()
 private def flushSegmentToAgglomerateChunk(key: String): Fox[Unit] = {
- val chunk = segmentToAgglomerateBuffer(key)
- val proto = SegmentToAgglomerateProto(chunk.toVector.map { segmentAgglomerateTuple =>
- SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2)
- })
- tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, proto.toByteArray)
+ val (chunk, isToBeReverted) = segmentToAgglomerateBuffer(key)
+ val valueToFlush: Array[Byte] =
+ if (isToBeReverted) revertedValue
+ else {
+ val proto = SegmentToAgglomerateChunkProto(chunk.toVector.map { segmentAgglomerateTuple =>
+ SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2)
+ })
+ proto.toByteArray
+ }
+ tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, valueToFlush)
 }
 private def flushAgglomerateGraph(key: String): Fox[Unit] = {
- val graph = agglomerateToGraphBuffer(key)
- tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, graph)
+ val (graph, isToBeReverted) = agglomerateToGraphBuffer(key)
+ val valueToFlush: Array[Byte] = if (isToBeReverted) revertedValue else graph
+ tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, valueToFlush)
 }
- private def updateIter(mappingFox: Fox[EditableMappingInfo],
remainingUpdates: List[EditableMappingUpdateAction])( + private def updateIter(mappingFox: Fox[EditableMappingInfo], remainingUpdates: List[UpdateAction])( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = mappingFox.futureBox.flatMap { case Empty => @@ -97,13 +115,14 @@ class EditableMappingUpdater( mappingFox } - private def applyOneUpdate(mapping: EditableMappingInfo, update: EditableMappingUpdateAction)( + def applyOneUpdate(mapping: EditableMappingInfo, update: UpdateAction)( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = update match { case splitAction: SplitAgglomerateUpdateAction => applySplitAction(mapping, splitAction) ?~> "Failed to apply split action" case mergeAction: MergeAgglomerateUpdateAction => applyMergeAction(mapping, mergeAction) ?~> "Failed to apply merge action" + case _ => Fox.failure("this is not an editable mapping update action!") } private def applySplitAction(editableMappingInfo: EditableMappingInfo, update: SplitAgglomerateUpdateAction)( @@ -112,22 +131,20 @@ class EditableMappingUpdater( segmentId1 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition1, update.segmentId1, - update.mag, - userToken) + update.mag)(tokenContext) segmentId2 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition2, update.segmentId2, - update.mag, - userToken) - agglomerateId <- agglomerateIdForSplitAction(update, segmentId1) + update.mag)(tokenContext) + agglomerateId <- agglomerateIdForSegmentId(segmentId1) agglomerateGraph <- agglomerateGraphForIdWithFallback(editableMappingInfo, agglomerateId) _ = if (segmentId1 == 0) logger.warn( - s"Split action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") + s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") _ = if (segmentId2 == 0) logger.warn( - s"Split action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") - (graph1, graph2) <- tryo(splitGraph(agglomerateId, agglomerateGraph, update, segmentId1, segmentId2)) ?~> s"splitGraph failed while removing edge between segments $segmentId1 and $segmentId2" + s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. 
Splitting outside of dataset?") + (graph1, graph2) <- tryo(splitGraph(agglomerateGraph, segmentId1, segmentId2)) ?~> s"splitGraph failed while removing edge between segments $segmentId1 and $segmentId2" largestExistingAgglomerateId <- largestAgglomerateId(editableMappingInfo) agglomerateId2 = largestExistingAgglomerateId + 1L _ <- updateSegmentToAgglomerate(graph2.segments, agglomerateId2) @@ -135,41 +152,33 @@ class EditableMappingUpdater( _ = updateAgglomerateGraph(agglomerateId2, graph2) } yield editableMappingInfo.withLargestAgglomerateId(agglomerateId2) - private def agglomerateIdForSplitAction(updateAction: SplitAgglomerateUpdateAction, segmentId1: Long)( - implicit ec: ExecutionContext): Fox[Long] = - if (relyOnAgglomerateIds) { - Fox.successful(updateAction.agglomerateId) - } else { - agglomerateIdForSegmentId(segmentId1) + private def getFromSegmentToAgglomerateBuffer(chunkKey: String): Option[Map[Long, Long]] = + segmentToAgglomerateBuffer.get(chunkKey).flatMap { + case (chunkFromBuffer, isToBeReverted) => + if (isToBeReverted) None else Some(chunkFromBuffer) } - private def agglomerateIdsForMergeAction(updateAction: MergeAgglomerateUpdateAction, - segmentId1: Long, - segmentId2: Long)(implicit ec: ExecutionContext): Fox[(Long, Long)] = - if (relyOnAgglomerateIds) { - Fox.successful((updateAction.agglomerateId1, updateAction.agglomerateId2)) - } else { - for { - agglomerateId1 <- agglomerateIdForSegmentId(segmentId1) - agglomerateId2 <- agglomerateIdForSegmentId(segmentId2) - } yield (agglomerateId1, agglomerateId2) + private def getFromAgglomerateToGraphBuffer(chunkKey: String): Option[AgglomerateGraph] = + agglomerateToGraphBuffer.get(chunkKey).flatMap { + case (graphFromBuffer, isToBeReverted) => + if (isToBeReverted) None else Some(graphFromBuffer) } private def agglomerateIdForSegmentId(segmentId: Long)(implicit ec: ExecutionContext): Fox[Long] = { val chunkId = segmentId / editableMappingService.defaultSegmentToAgglomerateChunkSize - val chunkKey = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) - val chunkFromBufferOpt = segmentToAgglomerateBuffer.get(chunkKey) + val chunkKey = segmentToAgglomerateKey(tracingId, chunkId) + val chunkFromBufferOpt = getFromSegmentToAgglomerateBuffer(chunkKey) for { chunk <- Fox.fillOption(chunkFromBufferOpt) { editableMappingService - .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) + .getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version = oldVersion) .map(_.toMap) } agglomerateId <- chunk.get(segmentId) match { case Some(agglomerateId) => Fox.successful(agglomerateId) case None => editableMappingService - .getBaseSegmentToAgglomerate(baseMappingName, Set(segmentId), remoteFallbackLayer, userToken) + .getBaseSegmentToAgglomerate(baseMappingName, Set(segmentId), remoteFallbackLayer)(tokenContext) .flatMap(baseSegmentToAgglomerate => baseSegmentToAgglomerate.get(segmentId)) } } yield agglomerateId @@ -188,45 +197,43 @@ class EditableMappingUpdater( private def updateSegmentToAgglomerateChunk(agglomerateId: Long, chunkId: Long, segmentIdsToUpdate: Seq[Long])( implicit ec: ExecutionContext): Fox[Unit] = for { - existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating it" + existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating 
it" mergedMap = existingChunk ++ segmentIdsToUpdate.map(_ -> agglomerateId).toMap - _ = segmentToAgglomerateBuffer.put(editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId), - mergedMap) + _ = segmentToAgglomerateBuffer.put(segmentToAgglomerateKey(tracingId, chunkId), (mergedMap, false)) } yield () - private def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, chunkId: Long)( + private def getSegmentToAgglomerateChunkWithEmptyFallback(tracingId: String, chunkId: Long)( implicit ec: ExecutionContext): Fox[Map[Long, Long]] = { - val key = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) - val fromBufferOpt = segmentToAgglomerateBuffer.get(key) + val key = segmentToAgglomerateKey(tracingId, chunkId) + val fromBufferOpt = getFromSegmentToAgglomerateBuffer(key) Fox.fillOption(fromBufferOpt) { editableMappingService - .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) + .getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version = oldVersion) .map(_.toMap) } } private def agglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, agglomerateId: Long)( implicit ec: ExecutionContext): Fox[AgglomerateGraph] = { - val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) - val fromBufferOpt = agglomerateToGraphBuffer.get(key) + val key = agglomerateGraphKey(tracingId, agglomerateId) + val fromBufferOpt = getFromAgglomerateToGraphBuffer(key) fromBufferOpt.map(Fox.successful(_)).getOrElse { editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, - editableMappingId, - Some(oldVersion), + tracingId, + oldVersion, agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer)(tokenContext) } } private def updateAgglomerateGraph(agglomerateId: Long, graph: AgglomerateGraph): Unit = { - val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) - agglomerateToGraphBuffer.put(key, graph) + val key = agglomerateGraphKey(tracingId, agglomerateId) + agglomerateToGraphBuffer.put(key, (graph, false)) } - private def splitGraph(agglomerateId: Long, - agglomerateGraph: AgglomerateGraph, - update: SplitAgglomerateUpdateAction, + private def emptyAgglomerateGraph = AgglomerateGraph(Seq(), Seq(), Seq(), Seq()) + + private def splitGraph(agglomerateGraph: AgglomerateGraph, segmentId1: Long, segmentId2: Long): (AgglomerateGraph, AgglomerateGraph) = { val edgesAndAffinitiesMinusOne: Seq[(AgglomerateEdge, Float)] = @@ -235,11 +242,7 @@ class EditableMappingUpdater( (from == segmentId1 && to == segmentId2) || (from == segmentId2 && to == segmentId1) } if (edgesAndAffinitiesMinusOne.length == agglomerateGraph.edges.length) { - if (relyOnAgglomerateIds) { - logger.warn( - s"Split action for editable mapping $editableMappingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. 
This split becomes a no-op.") - } - (agglomerateGraph, AgglomerateGraph(Seq(), Seq(), Seq(), Seq())) + (agglomerateGraph, emptyAgglomerateGraph) } else { val graph1Nodes: Set[Long] = computeConnectedComponent(startNode = segmentId1, @@ -308,8 +311,7 @@ class EditableMappingUpdater( private def largestAgglomerateId(mapping: EditableMappingInfo): Fox[Long] = for { largestBaseAgglomerateId <- remoteDatastoreClient.getLargestAgglomerateId(remoteFallbackLayer, - mapping.baseMappingName, - userToken) + mapping.baseMappingName)(tokenContext) } yield math.max(mapping.largestAgglomerateId, largestBaseAgglomerateId) private def applyMergeAction(mapping: EditableMappingInfo, update: MergeAgglomerateUpdateAction)( @@ -318,30 +320,23 @@ class EditableMappingUpdater( segmentId1 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition1, update.segmentId1, - update.mag, - userToken) + update.mag)(tokenContext) segmentId2 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition2, update.segmentId2, - update.mag, - userToken) + update.mag)(tokenContext) _ = if (segmentId1 == 0) logger.warn( - s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") + s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") _ = if (segmentId2 == 0) logger.warn( - s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") - (agglomerateId1, agglomerateId2) <- agglomerateIdsForMergeAction(update, segmentId1, segmentId2) ?~> "Failed to look up agglomerate ids for merge action segments" + s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. 
Merging outside of dataset?") + agglomerateId1 <- agglomerateIdForSegmentId(segmentId1) ?~> "Failed to look up agglomerate ids for merge action segments" + agglomerateId2 <- agglomerateIdForSegmentId(segmentId2) ?~> "Failed to look up agglomerate ids for merge action segments" agglomerateGraph1 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId1) ?~> s"Failed to get agglomerate graph for id $agglomerateId1" agglomerateGraph2 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId2) ?~> s"Failed to get agglomerate graph for id $agglomerateId2" _ <- bool2Fox(agglomerateGraph2.segments.contains(segmentId2)) ?~> s"Segment $segmentId2 as queried by position ${update.segmentPosition2} is not contained in fetched agglomerate graph for agglomerate $agglomerateId2" - mergedGraphOpt = mergeGraph(agglomerateGraph1, - agglomerateGraph2, - update, - agglomerateId1, - agglomerateId2, - segmentId1, - segmentId2) + mergedGraphOpt = mergeGraph(agglomerateGraph1, agglomerateGraph2, segmentId1, segmentId2) _ <- Fox.runOptional(mergedGraphOpt) { mergedGraph => for { _ <- updateSegmentToAgglomerate(agglomerateGraph2.segments, agglomerateId1) ?~> s"Failed to update segment to agglomerate buffer" @@ -353,15 +348,10 @@ class EditableMappingUpdater( private def mergeGraph(agglomerateGraph1: AgglomerateGraph, agglomerateGraph2: AgglomerateGraph, - update: MergeAgglomerateUpdateAction, - agglomerateId1: Long, - agglomerateId2: Long, segmentId1: Long, segmentId2: Long): Option[AgglomerateGraph] = { val segment1IsValid = agglomerateGraph1.segments.contains(segmentId1) val segment2IsValid = agglomerateGraph2.segments.contains(segmentId2) - warnOnInvalidSegmentToMerge(segment1IsValid, segmentId1, update.segmentPosition1, update.mag, agglomerateId1) - warnOnInvalidSegmentToMerge(segment2IsValid, segmentId2, update.segmentPosition2, update.mag, agglomerateId2) if (segment1IsValid && segment2IsValid) { val newEdge = AgglomerateEdge(segmentId1, segmentId2) val newEdgeAffinity = 255.0f @@ -375,15 +365,59 @@ class EditableMappingUpdater( } else None } - private def warnOnInvalidSegmentToMerge(isValid: Boolean, - segmentId: Long, - position: Option[Vec3Int], - mag: Vec3Int, - agglomerateId: Long): Unit = - if (!isValid && relyOnAgglomerateIds) { - logger.warn( - s"Merge action for editable mapping $editableMappingId: segment $segmentId as looked up at $position in mag $mag is not present in agglomerate $agglomerateId. 
This merge becomes a no-op" - ) - } + def revertToVersion(sourceVersion: Long)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- bool2Fox(sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" + _ = segmentToAgglomerateBuffer.clear() + _ = agglomerateToGraphBuffer.clear() + segmentToAgglomerateChunkNewestStream = new VersionedSegmentToAgglomerateChunkIterator( + tracingId, + tracingDataStore.editableMappingsSegmentToAgglomerate) + _ <- Fox.serialCombined(segmentToAgglomerateChunkNewestStream) { + case (chunkKey, _, version) => + if (version > sourceVersion) { + editableMappingService.getSegmentToAgglomerateChunk(chunkKey, Some(sourceVersion)).futureBox.map { + case Full(chunkData) => segmentToAgglomerateBuffer.put(chunkKey, (chunkData.toMap, false)) + case Empty => segmentToAgglomerateBuffer.put(chunkKey, (Map[Long, Long](), true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } + } else Fox.successful(()) + } + agglomerateToGraphNewestStream = new VersionedAgglomerateToGraphIterator( + tracingId, + tracingDataStore.editableMappingsAgglomerateToGraph) + _ <- Fox.serialCombined(agglomerateToGraphNewestStream) { + case (graphKey, _, version) => + if (version > sourceVersion) { + for { + agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) + _ <- editableMappingService + .getAgglomerateGraphForId(tracingId, sourceVersion, agglomerateId) + .futureBox + .map { + case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) + case Empty => agglomerateToGraphBuffer.put(graphKey, (emptyAgglomerateGraph, true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } + } yield () + } else Fox.successful(()) + } + } yield () + def newWithTargetVersion(currentMaterializedVersion: Long, targetVersion: Long): EditableMappingUpdater = + new EditableMappingUpdater( + annotationId, + tracingId, + baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tokenContext, + remoteDatastoreClient, + editableMappingService, + annotationService, + tracingDataStore + ) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 66154842b1d..e88746baa76 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -7,139 +7,41 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.SkeletonUpdateAction import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats -import net.liftweb.common.{Box, Empty, Full} -import play.api.i18n.MessagesProvider -import play.api.libs.json.{JsObject, 
JsValue, Json} +import net.liftweb.common.{Box, Full} import scala.concurrent.ExecutionContext class SkeletonTracingService @Inject()( tracingDataStore: TracingDataStore, - val temporaryTracingStore: TemporaryTracingStore[SkeletonTracing], - val handledGroupIdStore: TracingStoreRedisStore, - val temporaryTracingIdStore: TracingStoreRedisStore, - val uncommittedUpdatesStore: TracingStoreRedisStore, - val tracingMigrationService: SkeletonTracingMigrationService)(implicit val ec: ExecutionContext) - extends TracingService[SkeletonTracing] - with KeyValueStoreImplicits + temporaryTracingService: TemporaryTracingService +)(implicit val ec: ExecutionContext) + extends KeyValueStoreImplicits with ProtoGeometryImplicits + with BoundingBoxMerger + with ColorGenerator with FoxImplicits { - val tracingType: TracingType.Value = TracingType.skeleton - - val tracingStore: FossilDBClient = tracingDataStore.skeletons - implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing - implicit val updateActionJsonFormat: SkeletonUpdateAction.skeletonUpdateActionFormat.type = - SkeletonUpdateAction.skeletonUpdateActionFormat - - def currentVersion(tracingId: String): Fox[Long] = - tracingDataStore.skeletonUpdates.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - - def currentVersion(tracing: SkeletonTracing): Long = tracing.version - - def handleUpdateGroup(tracingId: String, - updateActionGroup: UpdateActionGroup[SkeletonTracing], - previousVersion: Long, - userToken: Option[String]): Fox[_] = - tracingDataStore.skeletonUpdates.put( - tracingId, - updateActionGroup.version, - updateActionGroup.actions - .map(_.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { //to the first action in the group, attach the group's info - case Nil => Nil - case first :: rest => first.addInfo(updateActionGroup.info) :: rest - } - ) - - override def applyPendingUpdates(tracing: SkeletonTracing, - tracingId: String, - desiredVersion: Option[Long]): Fox[SkeletonTracing] = { - val existingVersion = tracing.version - findDesiredOrNewestPossibleVersion(tracing, tracingId, desiredVersion).flatMap { newVersion => - if (newVersion > existingVersion) { - for { - pendingUpdates <- findPendingUpdates(tracingId, existingVersion, newVersion) - updatedTracing <- update(tracing, tracingId, pendingUpdates, newVersion) - _ <- save(updatedTracing, Some(tracingId), newVersion) - } yield updatedTracing - } else { - Full(tracing) - } - } - } - - private def findDesiredOrNewestPossibleVersion(tracing: SkeletonTracing, - tracingId: String, - desiredVersion: Option[Long]): Fox[Long] = - /* - * Determines the newest saved version from the updates column. 
- * if there are no updates at all, assume tracing is brand new (possibly created from NML, - * hence the emptyFallbck tracing.version) - */ - for { - newestUpdateVersion <- tracingDataStore.skeletonUpdates.getVersion(tracingId, - mayBeEmpty = Some(true), - emptyFallback = Some(tracing.version)) - } yield { - desiredVersion match { - case None => newestUpdateVersion - case Some(desiredSome) => math.min(desiredSome, newestUpdateVersion) - } - } - - private def findPendingUpdates(tracingId: String, - existingVersion: Long, - desiredVersion: Long): Fox[List[SkeletonUpdateAction]] = - if (desiredVersion == existingVersion) Fox.successful(List()) - else { - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions( - tracingId, - Some(desiredVersion), - Some(existingVersion + 1))(fromJsonBytes[List[SkeletonUpdateAction]]) - } yield updateActionGroups.reverse.flatten - } - - private def update(tracing: SkeletonTracing, - tracingId: String, - updates: List[SkeletonUpdateAction], - newVersion: Long): Fox[SkeletonTracing] = { - def updateIter(tracingFox: Fox[SkeletonTracing], - remainingUpdates: List[SkeletonUpdateAction]): Fox[SkeletonTracing] = - tracingFox.futureBox.flatMap { - case Empty => Fox.empty - case Full(tracing) => - remainingUpdates match { - case List() => Fox.successful(tracing) - case RevertToVersionAction(sourceVersion, _, _, _) :: tail => - val sourceTracing = find(tracingId, Some(sourceVersion), useCache = false, applyUpdates = true) - updateIter(sourceTracing, tail) - case update :: tail => updateIter(Full(update.applyOn(tracing)), tail) - } - case _ => tracingFox - } - - updates match { - case List() => Full(tracing) - case _ :: _ => - for { - updated <- updateIter(Some(tracing), updates) - } yield updated.withVersion(newVersion) + def saveSkeleton(tracing: SkeletonTracing, + tracingId: Option[String], + version: Long, + toTemporaryStore: Boolean = false): Fox[String] = { + val id = tracingId.getOrElse(TracingId.generate) + if (toTemporaryStore) { + temporaryTracingService.saveSkeleton(id, tracing).map(_ => id) + } else { + tracingDataStore.skeletons.put(id, version, tracing).map(_ => id) } } - def duplicate(tracing: SkeletonTracing, - fromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox]): Fox[String] = { + def adaptSkeletonForDuplicate(tracing: SkeletonTracing, + fromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox], + newVersion: Long): SkeletonTracing = { val taskBoundingBox = if (fromTask) { tracing.boundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 @@ -154,22 +56,19 @@ class SkeletonTracingService @Inject()( editPosition = editPosition.map(vec3IntToProto).getOrElse(tracing.editPosition), editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracing.editRotation), boundingBox = boundingBoxOptToProto(boundingBox).orElse(tracing.boundingBox), - version = 0 + version = newVersion ) .addAllUserBoundingBoxes(taskBoundingBox) - val finalTracing = if (fromTask) newTracing.clearBoundingBox else newTracing - save(finalTracing, None, finalTracing.version) + if (fromTask) newTracing.clearBoundingBox else newTracing } - def merge(tracings: Seq[SkeletonTracing], - mergedVolumeStats: MergedVolumeStats, - newEditableMappingIdOpt: Option[String]): Box[SkeletonTracing] = + def merge(tracings: Seq[SkeletonTracing], newVersion: Long): 
Box[SkeletonTracing] = for { tracing <- tracings.map(Full(_)).reduceLeft(mergeTwo) } yield tracing.copy( createdTimestamp = System.currentTimeMillis(), - version = 0L, + version = newVersion, ) private def mergeTwo(tracingA: Box[SkeletonTracing], tracingB: Box[SkeletonTracing]): Box[SkeletonTracing] = @@ -198,59 +97,12 @@ class SkeletonTracingService @Inject()( ) // Can be removed again when https://github.com/scalableminds/webknossos/issues/5009 is fixed - override def remapTooLargeTreeIds(skeletonTracing: SkeletonTracing): SkeletonTracing = + def remapTooLargeTreeIds(skeletonTracing: SkeletonTracing): SkeletonTracing = if (skeletonTracing.trees.exists(_.treeId > 1048576)) { val newTrees = for ((tree, index) <- skeletonTracing.trees.zipWithIndex) yield tree.withTreeId(index + 1) skeletonTracing.withTrees(newTrees) } else skeletonTracing - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[SkeletonTracing], - newId: String, - newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] = - Fox.successful(MergedVolumeStats.empty()) - - def updateActionLog(tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[SkeletonUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersionsAsVersionValueTuple( - tracingId, - newestVersion, - oldestVersion)(fromJsonBytes[List[SkeletonUpdateAction]]) - updateActionGroupsJs = updateActionGroups.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - - def updateActionStatistics(tracingId: String): Fox[JsObject] = - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions(tracingId)( - fromJsonBytes[List[SkeletonUpdateAction]]) - updateActions = updateActionGroups.flatten - } yield { - Json.obj( - "updateTracingActionCount" -> updateActions.count { - case _: UpdateTracingSkeletonAction => true - case _ => false - }, - "createNodeActionCount" -> updateActions.count { - case _: CreateNodeSkeletonAction => true - case _ => false - }, - "deleteNodeActionCount" -> updateActions.count { - case _: DeleteNodeSkeletonAction => true - case _ => false - } - ) - } - def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance - def mergeEditableMappings(tracingsWithIds: List[(SkeletonTracing, String)], userToken: Option[String]): Fox[String] = - Fox.empty } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index c34e5ae8c74..1ca66e9f5cd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -1,13 +1,18 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating -import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} +import com.scalableminds.webknossos.datastore.SkeletonTracing.{Edge, Node, SkeletonTracing, Tree, TreeGroup} import 
com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType import play.api.libs.json._ +trait SkeletonUpdateAction extends LayerUpdateAction { + def applyOn(tracing: SkeletonTracing): SkeletonTracing +} + case class CreateTreeSkeletonAction(id: Int, color: Option[com.scalableminds.util.image.Color], name: String, @@ -16,13 +21,14 @@ case class CreateTreeSkeletonAction(id: Int, comments: List[UpdateActionComment], groupId: Option[Int], isVisible: Option[Boolean], + `type`: Option[TreeType] = None, + edgesAreVisible: Option[Boolean], + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - `type`: Option[TreeType] = None, - edgesAreVisible: Option[Boolean] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val newTree = Tree( @@ -43,26 +49,31 @@ case class CreateTreeSkeletonAction(id: Int, tracing.withTrees(newTree +: tracing.trees) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteTreeSkeletonAction(id: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { + extends SkeletonUpdateAction { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withTrees(tracing.trees.filter(_.treeId != id)) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeSkeletonAction(id: Int, @@ -72,12 +83,13 @@ case class UpdateTreeSkeletonAction(id: Int, branchPoints: List[UpdateActionBranchPoint], comments: List[UpdateActionComment], groupId: Option[Int], + `type`: Option[TreeType] = None, + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, 
actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - `type`: Option[TreeType] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = @@ -95,20 +107,24 @@ case class UpdateTreeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, id, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class MergeTreeSkeletonAction(sourceId: Int, targetId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + // only nodes and edges are merged here, // other properties are managed explicitly // by the frontend with extra actions @@ -123,21 +139,25 @@ case class MergeTreeSkeletonAction(sourceId: Int, tracing.withTrees(mapTrees(tracing, targetId, treeTransform).filter(_.treeId != sourceId)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], sourceId: Int, targetId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + // this should only move a whole component, // that is disjoint from the rest of the tree override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -161,51 +181,60 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], tracing.withTrees(tracing.trees.map(selectTree)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: 
Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class CreateEdgeSkeletonAction(source: Int, target: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.withEdges(Edge(source, target) +: tree.edges) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteEdgeSkeletonAction(source: Int, target: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edges = tree.edges.filter(_ != Edge(source, target))) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) + } case class CreateNodeSkeletonAction(id: Int, @@ -218,11 +247,12 @@ case class CreateNodeSkeletonAction(id: Int, interpolation: Option[Boolean], treeId: Int, timestamp: Long, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -245,11 +275,13 @@ case class CreateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def 
addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateNodeSkeletonAction(id: Int, @@ -262,11 +294,12 @@ case class UpdateNodeSkeletonAction(id: Int, interpolation: Option[Boolean], treeId: Int, timestamp: Long, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -291,20 +324,22 @@ case class UpdateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteNodeSkeletonAction(nodeId: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -314,27 +349,32 @@ case class DeleteNodeSkeletonAction(nodeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: 
Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withTreeGroups(treeGroups.map(convertTreeGroup)) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTracingSkeletonAction(activeNode: Option[Int], @@ -342,11 +382,12 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editRotation: com.scalableminds.util.geometry.Vec3Double, zoomLevel: Double, userBoundingBox: Option[com.scalableminds.util.geometry.BoundingBox], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None, editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with ProtoGeometryImplicits { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.copy( @@ -358,35 +399,24 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) ) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true -} - -case class RevertToVersionAction(sourceVersion: Long, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - throw new Exception("RevertToVersionAction applied on unversioned tracing") + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = - this.copy(actionAuthorId = authorId) + override def isViewOnlyChange: Boolean = true } -case class UpdateTreeVisibility(treeId: Int, - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction +case class 
UpdateTreeVisibilitySkeletonAction(treeId: Int, + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(isVisible = Some(isVisible)) @@ -394,20 +424,24 @@ case class UpdateTreeVisibility(treeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) + override def isViewOnlyChange: Boolean = true } -case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction +case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateTreeGroups(treeGroups: Seq[TreeGroup]) = { @@ -431,56 +465,64 @@ case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], } } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } -case class UpdateTreeEdgesVisibility(treeId: Int, - edgesAreVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction +case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, + edgesAreVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edgesAreVisible = Some(edgesAreVisible)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: 
Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } -case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { +case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundingBox], + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } -case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { +case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[Int], + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateUserBoundingBoxes() = tracing.userBoundingBoxes.map { boundingBox => @@ -493,26 +535,14 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true -} - -case class UpdateTdCamera(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - 
info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = - this.copy(actionAuthorId = authorId) override def isViewOnlyChange: Boolean = true } @@ -552,104 +582,22 @@ object UpdateTreeGroupsSkeletonAction { object UpdateTracingSkeletonAction { implicit val jsonFormat: OFormat[UpdateTracingSkeletonAction] = Json.format[UpdateTracingSkeletonAction] } -object RevertToVersionAction { - implicit val jsonFormat: OFormat[RevertToVersionAction] = Json.format[RevertToVersionAction] -} -object UpdateTreeVisibility { - implicit val jsonFormat: OFormat[UpdateTreeVisibility] = Json.format[UpdateTreeVisibility] -} -object UpdateTreeGroupVisibility { - implicit val jsonFormat: OFormat[UpdateTreeGroupVisibility] = Json.format[UpdateTreeGroupVisibility] -} -object UpdateTreeEdgesVisibility { - implicit val jsonFormat: OFormat[UpdateTreeEdgesVisibility] = Json.format[UpdateTreeEdgesVisibility] -} -object UpdateUserBoundingBoxes { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxes] = Json.format[UpdateUserBoundingBoxes] -} -object UpdateUserBoundingBoxVisibility { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibility] = Json.format[UpdateUserBoundingBoxVisibility] -} -object UpdateTdCamera { implicit val jsonFormat: OFormat[UpdateTdCamera] = Json.format[UpdateTdCamera] } - -object SkeletonUpdateAction { - - implicit object skeletonUpdateActionFormat extends Format[UpdateAction[SkeletonTracing]] { - override def reads(json: JsValue): JsResult[UpdateAction.SkeletonUpdateAction] = { - val jsonValue = (json \ "value").as[JsObject] - (json \ "name").as[String] match { - case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) - case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) - case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) - case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) - case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) - case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) - case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) - case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) - case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) - case "updateTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) - case "revertToVersion" => deserialize[RevertToVersionAction](jsonValue) - case "updateTreeVisibility" => deserialize[UpdateTreeVisibility](jsonValue) - case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibility](jsonValue) - case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibility](jsonValue) - case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxes](jsonValue) - case "updateUserBoundingBoxVisibility" => 
deserialize[UpdateUserBoundingBoxVisibility](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCamera](jsonValue) - } - } - - private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( - implicit tjs: Reads[T]): JsResult[T] = - if (shouldTransformPositions) - json.transform(positionTransform).get.validate[T] - else - json.validate[T] - - private val positionTransform = - (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) - - override def writes(a: UpdateAction[SkeletonTracing]): JsObject = a match { - case s: CreateTreeSkeletonAction => - Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) - case s: DeleteTreeSkeletonAction => - Json.obj("name" -> "deleteTree", "value" -> Json.toJson(s)(DeleteTreeSkeletonAction.jsonFormat)) - case s: UpdateTreeSkeletonAction => - Json.obj("name" -> "updateTree", "value" -> Json.toJson(s)(UpdateTreeSkeletonAction.jsonFormat)) - case s: MergeTreeSkeletonAction => - Json.obj("name" -> "mergeTree", "value" -> Json.toJson(s)(MergeTreeSkeletonAction.jsonFormat)) - case s: MoveTreeComponentSkeletonAction => - Json.obj("name" -> "moveTreeComponent", "value" -> Json.toJson(s)(MoveTreeComponentSkeletonAction.jsonFormat)) - case s: CreateNodeSkeletonAction => - Json.obj("name" -> "createNode", "value" -> Json.toJson(s)(CreateNodeSkeletonAction.jsonFormat)) - case s: DeleteNodeSkeletonAction => - Json.obj("name" -> "deleteNode", "value" -> Json.toJson(s)(DeleteNodeSkeletonAction.jsonFormat)) - case s: UpdateNodeSkeletonAction => - Json.obj("name" -> "updateNode", "value" -> Json.toJson(s)(UpdateNodeSkeletonAction.jsonFormat)) - case s: CreateEdgeSkeletonAction => - Json.obj("name" -> "createEdge", "value" -> Json.toJson(s)(CreateEdgeSkeletonAction.jsonFormat)) - case s: DeleteEdgeSkeletonAction => - Json.obj("name" -> "deleteEdge", "value" -> Json.toJson(s)(DeleteEdgeSkeletonAction.jsonFormat)) - case s: UpdateTreeGroupsSkeletonAction => - Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) - case s: UpdateTracingSkeletonAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) - case s: RevertToVersionAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAction.jsonFormat)) - case s: UpdateTreeVisibility => - Json.obj("name" -> "updateTreeVisibility", "value" -> Json.toJson(s)(UpdateTreeVisibility.jsonFormat)) - case s: UpdateTreeGroupVisibility => - Json.obj("name" -> "updateTreeGroupVisibility", "value" -> Json.toJson(s)(UpdateTreeGroupVisibility.jsonFormat)) - case s: UpdateTreeEdgesVisibility => - Json.obj("name" -> "updateTreeEdgesVisibility", "value" -> Json.toJson(s)(UpdateTreeEdgesVisibility.jsonFormat)) - case s: UpdateUserBoundingBoxes => - Json.obj("name" -> "updateUserBoundingBoxes", "value" -> Json.toJson(s)(UpdateUserBoundingBoxes.jsonFormat)) - case s: UpdateUserBoundingBoxVisibility => - Json.obj("name" -> "updateUserBoundingBoxVisibility", - "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibility.jsonFormat)) - case s: UpdateTdCamera => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCamera.jsonFormat)) - } - } +object UpdateTreeVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeVisibilitySkeletonAction] = Json.format[UpdateTreeVisibilitySkeletonAction] +} +object UpdateTreeGroupVisibilitySkeletonAction { + implicit 
val jsonFormat: OFormat[UpdateTreeGroupVisibilitySkeletonAction] = + Json.format[UpdateTreeGroupVisibilitySkeletonAction] +} +object UpdateTreeEdgesVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeEdgesVisibilitySkeletonAction] = + Json.format[UpdateTreeEdgesVisibilitySkeletonAction] +} +object UpdateUserBoundingBoxesSkeletonAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesSkeletonAction] = + Json.format[UpdateUserBoundingBoxesSkeletonAction] +} +object UpdateUserBoundingBoxVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilitySkeletonAction] = + Json.format[UpdateUserBoundingBoxVisibilitySkeletonAction] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 3b3c99db295..1f3c716ffd7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -15,6 +16,7 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services.{FullMeshHelper, FullMeshRequest} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.FallbackDataHelper import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} @@ -25,6 +27,7 @@ import scala.concurrent.ExecutionContext class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, volumeSegmentIndexService: VolumeSegmentIndexService, val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient) @@ -33,66 +36,70 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, with FullMeshHelper with LazyLogging { - def loadFor(token: Option[String], tracingId: String, fullMeshRequest: FullMeshRequest)( - implicit ec: ExecutionContext): Fox[Array[Byte]] = + def loadFor(annotationId: String, tracingId: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Array[Byte]] = for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" data <- if (fullMeshRequest.meshFileName.isDefined) - loadFullMeshFromMeshfile(token, tracing, tracingId, fullMeshRequest) - else loadFullMeshFromAdHoc(token, tracing, tracingId, fullMeshRequest) + loadFullMeshFromMeshfile(annotationId, tracingId, tracing, fullMeshRequest) + else loadFullMeshFromAdHoc(annotationId, tracingId, tracing, fullMeshRequest) } yield data private def loadFullMeshFromMeshfile( - token: Option[String], - tracing: VolumeTracing, + annotationId: String, tracingId: String, - fullMeshRequest: 
FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + tracing: VolumeTracing, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - baseMappingName <- volumeTracingService.baseMappingName(tracing) + baseMappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) fullMeshRequestAdapted = if (tracing.getHasEditableMapping) fullMeshRequest.copy(mappingName = baseMappingName, editableMappingTracingId = Some(tracingId), mappingType = Some("HDF5")) else fullMeshRequest - array <- remoteDatastoreClient.loadFullMeshStl(token, remoteFallbackLayer, fullMeshRequestAdapted) + array <- remoteDatastoreClient.loadFullMeshStl(remoteFallbackLayer, fullMeshRequestAdapted) } yield array - private def loadFullMeshFromAdHoc(token: Option[String], - tracing: VolumeTracing, - tracingId: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + private def loadFullMeshFromAdHoc( + annotationId: String, + tracingId: String, + tracing: VolumeTracing, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" _ <- bool2Fox(tracing.mags.contains(vec3IntToProto(mag))) ?~> "mag.notPresentInTracing" before = Instant.now - voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId, token) ?~> "voxelSize.failedToFetch" + voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId) ?~> "voxelSize.failedToFetch" verticesForChunks <- if (tracing.hasSegmentIndex.getOrElse(false)) - getAllAdHocChunksWithSegmentIndex(token, tracing, tracingId, mag, voxelSize, fullMeshRequest) + getAllAdHocChunksWithSegmentIndex(annotationId, tracingId, tracing, mag, voxelSize, fullMeshRequest) else - getAllAdHocChunksWithNeighborLogic(token, - tracing, - tracingId, - mag, - voxelSize, - fullMeshRequest, - fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), - adHocChunkSize) + getAllAdHocChunksWithNeighborLogic( + tracing, + annotationId, + tracingId, + mag, + voxelSize, + fullMeshRequest, + fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), + adHocChunkSize + ) encoded = verticesForChunks.map(adHocMeshToStl) array = combineEncodedChunksToStl(encoded) _ = logMeshingDuration(before, "ad-hoc meshing (tracingstore)", array.length) } yield array private def getAllAdHocChunksWithSegmentIndex( - token: Option[String], - tracing: VolumeTracing, + annotationId: String, tracingId: String, + tracing: VolumeTracing, mag: Vec3Int, voxelSize: VoxelSize, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Array[Float]]] = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) - mappingName <- volumeTracingService.baseMappingName(tracing) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, @@ -103,8 +110,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, mappingName = mappingName, editableMappingTracingId = 
volumeTracingService.editableMappingTracingId(tracing, tracingId), fullMeshRequest.additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - token + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -124,13 +130,13 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates, findNeighbors = false ) - loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + loadMeshChunkFromAdHoc(tracing, adHocMeshRequest, annotationId, tracingId) } allVertices = vertexChunksWithNeighbors.map(_._1) } yield allVertices - private def getAllAdHocChunksWithNeighborLogic(token: Option[String], - tracing: VolumeTracing, + private def getAllAdHocChunksWithNeighborLogic(tracing: VolumeTracing, + annotationId: String, tracingId: String, mag: Vec3Int, voxelSize: VoxelSize, @@ -139,7 +145,8 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, chunkSize: Vec3Int, visited: collection.mutable.Set[VoxelPosition] = collection.mutable.Set[VoxelPosition]())( - implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[Array[Float]]] = for { topLeft <- topLeftOpt.toFox ?~> "seedPosition.neededForAdHoc" adHocMeshRequest = WebknossosAdHocMeshRequest( @@ -153,12 +160,12 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates ) _ = visited += topLeft - (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(tracing, adHocMeshRequest, annotationId, tracingId) nextPositions: List[VoxelPosition] = generateNextTopLeftsFromNeighbors(topLeft, neighbors, chunkSize, visited) _ = visited ++= nextPositions neighborVerticesNested <- Fox.serialCombined(nextPositions) { position: VoxelPosition => - getAllAdHocChunksWithNeighborLogic(token, - tracing, + getAllAdHocChunksWithNeighborLogic(tracing, + annotationId, tracingId, mag, voxelSize, @@ -170,11 +177,12 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, allVertices: List[Array[Float]] = vertices +: neighborVerticesNested.flatten } yield allVertices - private def loadMeshChunkFromAdHoc(token: Option[String], - tracing: VolumeTracing, + private def loadMeshChunkFromAdHoc(tracing: VolumeTracing, adHocMeshRequest: WebknossosAdHocMeshRequest, - tracingId: String): Fox[(Array[Float], List[Int])] = - if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest, token) - else volumeTracingService.createAdHocMesh(tracingId, adHocMeshRequest, token) + annotationId: String, + tracingId: String)(implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = + if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.createAdHocMesh(mappingLayer, adHocMeshRequest) + } else volumeTracingService.createAdHocMesh(tracingId, tracing, adHocMeshRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala index 5b9fb21fad4..c16fa929d47 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala @@ -25,11 +25,7 @@ import java.util.zip.{ZipEntry, ZipFile} import scala.collection.mutable import scala.concurrent.ExecutionContext -trait VolumeDataZipHelper - extends WKWDataFormatHelper - with VolumeBucketReversionHelper - with BoxImplicits - with LazyLogging { +trait VolumeDataZipHelper extends WKWDataFormatHelper with ReversionHelper with BoxImplicits with LazyLogging { protected def withBucketsFromZip(zipFile: File)(block: (BucketPosition, Array[Byte]) => Fox[Unit])( implicit ec: ExecutionContext): Fox[Unit] = @@ -61,7 +57,7 @@ trait VolumeDataZipHelper parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => if (buckets.hasNext) { val data = buckets.next() - if (!isRevertedBucket(data)) { + if (!isRevertedElement(data)) { block(bucketPosition, data) } else Fox.successful(()) } else Fox.successful(()) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala index 755cc665464..9d0d35cf2e3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto @@ -33,7 +34,7 @@ class VolumeSegmentIndexBuffer(tracingId: String, remoteDatastoreClient: TSRemoteDatastoreClient, fallbackLayer: Option[RemoteFallbackLayer], additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String]) + tc: TokenContext) extends KeyValueStoreImplicits with SegmentIndexKeyHelper with ProtoGeometryImplicits @@ -86,12 +87,7 @@ class VolumeSegmentIndexBuffer(tracingId: String, .fillEmpty(ListOfVec3IntProto.of(Seq())) data <- fallbackLayer match { case Some(layer) if fossilDbData.length == 0 => - remoteDatastoreClient.querySegmentIndex(layer, - segmentId, - mag, - mappingName, - editableMappingTracingId, - userToken) + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId)(tc) case _ => Fox.successful(fossilDbData.values.map(vec3IntFromProto)) } } yield ListOfVec3IntProto(data.map(vec3IntToProto)) @@ -168,13 +164,8 @@ class VolumeSegmentIndexBuffer(tracingId: String, fileBucketPositions <- fallbackLayer match { case Some(layer) => for { - fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)( - remoteDatastoreClient.querySegmentIndexForMultipleSegments(layer, - missesSoFar, - mag, - mappingName, - editableMappingTracingId, - userToken)) + fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)(remoteDatastoreClient + .querySegmentIndexForMultipleSegments(layer, missesSoFar, mag, mappingName, editableMappingTracingId)(tc)) fileBucketPositions = fileBucketPositionsOpt.getOrElse(Seq()) _ = fileBucketPositions.map { case (segmentId, positions) => diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala index 150c4938bb6..4416f678d9a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.box2Fox @@ -25,11 +26,11 @@ import net.liftweb.common.Box.tryo import scala.concurrent.ExecutionContext object VolumeSegmentIndexService { - def canHaveSegmentIndex(remoteDatastoreClient: TSRemoteDatastoreClient, - fallbackLayer: Option[RemoteFallbackLayer], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Boolean] = + def canHaveSegmentIndex(remoteDatastoreClient: TSRemoteDatastoreClient, fallbackLayer: Option[RemoteFallbackLayer])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Boolean] = fallbackLayer match { - case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer, userToken) + case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer) case None => Fox.successful(true) } } @@ -43,6 +44,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore with ProtoGeometryImplicits with VolumeBucketCompression with SegmentIndexKeyHelper + with ReversionHelper with LazyLogging { private val volumeSegmentIndexClient: FossilDBClient = tracingDataStore.volumeSegmentIndex @@ -59,8 +61,14 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore mappingName: Option[String], editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] = for { - bucketBytesDecompressed <- tryo( - decompressIfNeeded(bucketBytes, expectedUncompressedBucketSizeFor(elementClass), "")).toFox + bucketBytesDecompressed <- if (isRevertedElement(bucketBytes)) { + Fox.successful(emptyArrayForElementClass(elementClass)) + } else { + tryo( + decompressIfNeeded(bucketBytes, + expectedUncompressedBucketSizeFor(elementClass), + "updating segment index, new bucket data")).toFox + } // previous bytes: include fallback layer bytes if available, otherwise use empty bytes previousBucketBytesWithEmptyFallback <- bytesWithEmptyFallback(previousBucketBytesBox, elementClass) ?~> "volumeSegmentIndex.update.getPreviousBucket.failed" segmentIds: Set[Long] <- collectSegmentIds(bucketBytesDecompressed, elementClass) @@ -87,11 +95,14 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore private def bytesWithEmptyFallback(bytesBox: Box[Array[Byte]], elementClass: ElementClassProto)( implicit ec: ExecutionContext): Fox[Array[Byte]] = bytesBox match { - case Empty => Fox.successful(Array.fill[Byte](ElementClass.bytesPerElement(elementClass))(0)) + case Empty => Fox.successful(emptyArrayForElementClass(elementClass)) case Full(bytes) => Fox.successful(bytes) case f: Failure => f.toFox } + private def emptyArrayForElementClass(elementClass: ElementClassProto): Array[Byte] = + Array.fill[Byte](ElementClass.bytesPerElement(elementClass))(0) + private def removeBucketFromSegmentIndex( 
segmentIndexBuffer: VolumeSegmentIndexBuffer, segmentId: Long, @@ -158,17 +169,17 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore bucketList <- addEmptyFallback(bucketListBox) } yield bucketList - def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( - fallbackLayer: Option[RemoteFallbackLayer], - tracingId: String, - segmentId: Long, - mag: Vec3Int, - version: Option[Long] = None, - mappingName: Option[String], - editableMappingTracingId: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(fallbackLayer: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long] = None, + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[ListOfVec3IntProto] = for { bucketListBox <- getSegmentToBucketIndex(fallbackLayer, tracingId, @@ -178,8 +189,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore mappingName, editableMappingTracingId, additionalCoordinates, - additionalAxes, - userToken).futureBox + additionalAxes).futureBox bucketList <- addEmptyFallback(bucketListBox) } yield bucketList @@ -191,17 +201,17 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore case Empty => Fox.successful(ListOfVec3IntProto(Seq.empty)) } - private def getSegmentToBucketIndex( - fallbackLayerOpt: Option[RemoteFallbackLayer], - tracingId: String, - segmentId: Long, - mag: Vec3Int, - version: Option[Long], - mappingName: Option[String], - editableMappingTracingId: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + private def getSegmentToBucketIndex(fallbackLayerOpt: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long], + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[ListOfVec3IntProto] = for { fromMutableIndex <- getSegmentToBucketIndexFromFossilDB(tracingId, segmentId, @@ -211,12 +221,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore additionalAxes).fillEmpty(ListOfVec3IntProto.of(Seq())) fromFileIndex <- fallbackLayerOpt match { // isEmpty is not the same as length == 0 here :( case Some(fallbackLayer) if fromMutableIndex.length == 0 => - getSegmentToBucketIndexFromFile(fallbackLayer, - segmentId, - mag, - mappingName, - editableMappingTracingId, - userToken) // additional coordinates not supported, see #7556 + getSegmentToBucketIndexFromFile(fallbackLayer, segmentId, mag, mappingName, editableMappingTracingId) // additional coordinates not supported, see #7556 case _ => Fox.successful(Seq.empty) } combined = fromMutableIndex.values.map(vec3IntFromProto) ++ fromFileIndex @@ -237,8 +242,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore segmentId: Long, mag: Vec3Int, mappingName: Option[String], - 
editableMappingTracingId: Option[String], - userToken: Option[String]) = - remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId, userToken) + editableMappingTracingId: Option[String])(implicit tc: TokenContext) = + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index cb12c273f53..cede715f841 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -9,71 +10,77 @@ import com.scalableminds.webknossos.datastore.models.{UnsignedInteger, UnsignedI import com.scalableminds.webknossos.datastore.models.datasource.DataLayer import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import javax.inject.Inject import scala.concurrent.ExecutionContext class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTracingService, + annotationService: TSAnnotationService, volumeSegmentIndexService: VolumeSegmentIndexService, editableMappingService: EditableMappingService) extends ProtoGeometryImplicits with SegmentStatistics { // Returns the segment volume (=number of voxels) in the target mag - def getSegmentVolume(tracingId: String, + def getSegmentVolume(annotationId: String, + tracingId: String, segmentId: Long, mag: Vec3Int, mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = calculateSegmentVolume( segmentId, mag, additionalCoordinates, - getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates), + getTypedDataForBucketPosition(annotationId, tracingId) ) - def getSegmentBoundingBox(tracingId: String, + def getSegmentBoundingBox(annotationId: String, + tracingId: String, segmentId: Long, mag: Vec3Int, mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[BoundingBox] = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[BoundingBox] = calculateSegmentBoundingBox( segmentId, mag, additionalCoordinates, - getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), - 
getTypedDataForBucketPosition(tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates), + getTypedDataForBucketPosition(annotationId, tracingId) ) - private def getTypedDataForBucketPosition(tracingId: String, userToken: Option[String])( + private def getTypedDataForBucketPosition(annotationId: String, tracingId: String)( bucketPosition: Vec3Int, mag: Vec3Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]]) = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext, ec: ExecutionContext) = for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" - bucketData <- getVolumeDataForPositions(tracing, + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" + bucketData <- getVolumeDataForPositions(annotationId, tracingId, + tracing, mag, Seq(bucketPosition), - additionalCoordinates, - userToken) + additionalCoordinates) dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray( bucketData, elementClassFromProto(tracing.elementClass)) } yield dataTyped - private def getBucketPositions( - tracingId: String, - mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(segmentId: Long, mag: Vec3Int)(implicit ec: ExecutionContext) = + private def getBucketPositions(annotationId: String, + tracingId: String, + mappingName: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + segmentId: Long, + mag: Vec3Int)(implicit ec: ExecutionContext, tc: TokenContext) = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -85,17 +92,17 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci mappingName, editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), additionalCoordinates, - additionalAxes, - userToken + additionalAxes ) } yield allBucketPositions - private def getVolumeDataForPositions(tracing: VolumeTracing, - tracingId: String, - mag: Vec3Int, - bucketPositions: Seq[Vec3Int], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String]): Fox[Array[Byte]] = { + private def getVolumeDataForPositions( + annotationId: String, + tracingId: String, + tracing: VolumeTracing, + mag: Vec3Int, + bucketPositions: Seq[Vec3Int], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = { val dataRequests = bucketPositions.map { position => WebknossosDataRequest( @@ -109,9 +116,10 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci ) }.toList for { - (data, _) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, dataRequests, userToken) - else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true, userToken) + (data, _) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + 
editableMappingService.volumeData(mappingLayer, dataRequests) + } else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true) } yield data } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 14832ee36de..8a84fdbcce6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -2,7 +2,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.dataformats.wkw.{MortonEncoding, WKWDataFormatHelper} +import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.services.DataConverter @@ -13,12 +13,13 @@ import net.liftweb.common.{Empty, Failure, Full} import scala.annotation.tailrec import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ -trait VolumeBucketReversionHelper { - protected def isRevertedBucket(data: Array[Byte]): Boolean = data sameElements Array[Byte](0) +trait ReversionHelper { + val revertedValue: Array[Byte] = Array[Byte](0) - protected def isRevertedBucket(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedBucket(bucket.value) + protected def isRevertedElement(data: Array[Byte]): Boolean = data.sameElements(revertedValue) + + protected def isRevertedElement(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedElement(bucket.value) } trait VolumeBucketCompression extends LazyLogging { @@ -48,7 +49,7 @@ trait VolumeBucketCompression extends LazyLogging { } catch { case e: Exception => logger.error( - s"Failed to LZ4-decompress volume bucket ($debugInfo, expected uncompressed size $expectedUncompressedBucketSize): $e") + s"Failed to LZ4-decompress volume bucket ($debugInfo, compressed size: ${data.length}, expected uncompressed size $expectedUncompressedBucketSize): $e") throw e } } @@ -78,20 +79,18 @@ trait AdditionalCoordinateKey { } } -trait BucketKeys extends MortonEncoding with WKWDataFormatHelper with LazyLogging with AdditionalCoordinateKey { +trait BucketKeys extends WKWDataFormatHelper with AdditionalCoordinateKey { protected def buildBucketKey(dataLayerName: String, bucket: BucketPosition, - additionalAxes: Option[Seq[AdditionalAxis]]): String = { - val mortonIndex = mortonEncode(bucket.bucketX, bucket.bucketY, bucket.bucketZ) + additionalAxes: Option[Seq[AdditionalAxis]]): String = (bucket.additionalCoordinates, additionalAxes, bucket.hasAdditionalCoordinates) match { case (Some(additionalCoordinates), Some(axes), true) => - s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/$mortonIndex-[${additionalCoordinatesKeyPart( + s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/[${additionalCoordinatesKeyPart( additionalCoordinates, axes)}][${bucket.bucketX},${bucket.bucketY},${bucket.bucketZ}]" case _ => - 
s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/$mortonIndex-[${bucket.bucketX},${bucket.bucketY},${bucket.bucketZ}]" + s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/[${bucket.bucketX},${bucket.bucketY},${bucket.bucketZ}]" } - } protected def buildKeyPrefix(dataLayerName: String): String = s"$dataLayerName/" @@ -104,7 +103,7 @@ trait BucketKeys extends MortonEncoding with WKWDataFormatHelper with LazyLoggin } private def parseBucketKeyXYZ(key: String) = { - val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)]".r + val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/\\[(\\d+),(\\d+),(\\d+)]".r key match { case keyRx(name, magStr, xStr, yStr, zStr) => getBucketPosition(xStr, yStr, zStr, magStr, None).map(bucketPosition => (name, bucketPosition)) @@ -117,7 +116,7 @@ trait BucketKeys extends MortonEncoding with WKWDataFormatHelper with LazyLoggin key: String, additionalAxes: Seq[AdditionalAxis]): Option[(String, BucketPosition)] = { val additionalCoordinateCapture = Array.fill(additionalAxes.length)("(\\d+)").mkString(",") - val keyRx = s"([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[$additionalCoordinateCapture]\\[(\\d+),(\\d+),(\\d+)]".r + val keyRx = s"([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/\\[$additionalCoordinateCapture]\\[(\\d+),(\\d+),(\\d+)]".r val matchOpt = keyRx.findFirstMatchIn(key) matchOpt match { case Some(aMatch) => @@ -170,63 +169,58 @@ trait VolumeTracingBucketHelper with VolumeBucketCompression with DataConverter with BucketKeys - with VolumeBucketReversionHelper { + with ReversionHelper { implicit def ec: ExecutionContext + def volumeDataStore: FossilDBClient + def temporaryTracingService: TemporaryTracingService - // used to store compound annotations - private val temporaryVolumeDataTimeout: FiniteDuration = 70 minutes - - implicit def volumeDataStore: FossilDBClient - implicit def temporaryVolumeDataStore: TemporaryVolumeDataStore - - private def loadBucketFromTemporaryStore(key: String) = - temporaryVolumeDataStore.find(key).map(VersionedKeyValuePair(VersionedKey(key, 0), _)) - - def loadBucket(dataLayer: VolumeTracingLayer, + def loadBucket(volumeTracingLayer: VolumeTracingLayer, bucket: BucketPosition, version: Option[Long] = None): Fox[Array[Byte]] = { - val key = buildBucketKey(dataLayer.name, bucket, dataLayer.additionalAxes) + val bucketKey = buildBucketKey(volumeTracingLayer.name, bucket, volumeTracingLayer.additionalAxes) + + val dataFox = + if (volumeTracingLayer.isTemporaryTracing) + temporaryTracingService.getVolumeBucket(bucketKey).map(VersionedKeyValuePair(VersionedKey(bucketKey, 0), _)) + else + volumeDataStore.get(bucketKey, version, mayBeEmpty = Some(true)) - val dataFox = loadBucketFromTemporaryStore(key) match { - case Some(data) => Fox.successful(data) - case None => volumeDataStore.get(key, version, mayBeEmpty = Some(true)) - } val unpackedDataFox = dataFox.flatMap { versionedVolumeBucket => - if (isRevertedBucket(versionedVolumeBucket)) Fox.empty + if (isRevertedElement(versionedVolumeBucket)) Fox.empty else { val debugInfo = - s"key: $key, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" + s"key: $bucketKey, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" Fox.successful( - decompressIfNeeded(versionedVolumeBucket.value, expectedUncompressedBucketSizeFor(dataLayer), debugInfo)) + decompressIfNeeded(versionedVolumeBucket.value, + expectedUncompressedBucketSizeFor(volumeTracingLayer), + debugInfo)) } } 
unpackedDataFox.futureBox.flatMap { case Full(unpackedData) => Fox.successful(unpackedData) case Empty => - if (dataLayer.includeFallbackDataIfAvailable && dataLayer.tracing.fallbackLayer.nonEmpty) { - loadFallbackBucket(dataLayer, bucket) + if (volumeTracingLayer.includeFallbackDataIfAvailable && volumeTracingLayer.tracing.fallbackLayer.nonEmpty) { + loadFallbackBucket(volumeTracingLayer, bucket) } else Fox.empty case f: Failure => f.toFox } } - private def loadFallbackBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { + private def loadFallbackBucket(layer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { val dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, - cubeSize = dataLayer.lengthOfUnderlyingCubes(bucket.mag), + cubeSize = layer.lengthOfUnderlyingCubes(bucket.mag), fourBit = None, - applyAgglomerate = dataLayer.tracing.mappingName, + applyAgglomerate = layer.tracing.mappingName, version = None, additionalCoordinates = None ) for { - remoteFallbackLayer <- dataLayer.volumeTracingService - .remoteFallbackLayerFromVolumeTracing(dataLayer.tracing, dataLayer.name) - (unmappedData, indices) <- dataLayer.volumeTracingService.getFallbackDataFromDatastore(remoteFallbackLayer, - List(dataRequest), - dataLayer.userToken) + remoteFallbackLayer <- layer.volumeTracingService.remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.name) + (unmappedData, indices) <- layer.volumeTracingService + .getFallbackDataFromDatastore(remoteFallbackLayer, List(dataRequest))(ec, layer.tokenContext) unmappedDataOrEmpty <- if (indices.isEmpty) Fox.successful(unmappedData) else Fox.empty } yield unmappedDataOrEmpty } @@ -251,20 +245,18 @@ trait VolumeTracingBucketHelper version: Long, toTemporaryStore: Boolean, additionalAxes: Option[Seq[AdditionalAxis]]): Fox[Unit] = { - val key = buildBucketKey(tracingId, bucket, additionalAxes) + val bucketKey = buildBucketKey(tracingId, bucket, additionalAxes) val compressedBucket = compressVolumeBucket(data, expectedUncompressedBucketSizeFor(elementClass)) if (toTemporaryStore) { - // Note that this temporary store is for temporary volumes only (e.g. 
compound projects) - // and cannot be used for download or versioning - Fox.successful(temporaryVolumeDataStore.insert(key, compressedBucket, Some(temporaryVolumeDataTimeout))) + temporaryTracingService.saveVolumeBucket(bucketKey, compressedBucket) } else { - volumeDataStore.put(key, version, compressedBucket) + volumeDataStore.put(bucketKey, version, compressedBucket) } } def bucketStream(dataLayer: VolumeTracingLayer, version: Option[Long]): Iterator[(BucketPosition, Array[Byte])] = { - val key = buildKeyPrefix(dataLayer.name) - new BucketIterator(key, + val keyPrefix = buildKeyPrefix(dataLayer.name) + new BucketIterator(keyPrefix, volumeDataStore, expectedUncompressedBucketSizeFor(dataLayer), version, @@ -273,8 +265,8 @@ trait VolumeTracingBucketHelper def bucketStreamWithVersion(dataLayer: VolumeTracingLayer, version: Option[Long]): Iterator[(BucketPosition, Array[Byte], Long)] = { - val key = buildKeyPrefix(dataLayer.name) - new VersionedBucketIterator(key, + val keyPrefix = buildKeyPrefix(dataLayer.name) + new VersionedBucketIterator(keyPrefix, volumeDataStore, expectedUncompressedBucketSizeFor(dataLayer), version, @@ -283,7 +275,7 @@ trait VolumeTracingBucketHelper def bucketStreamFromTemporaryStore(dataLayer: VolumeTracingLayer): Iterator[(BucketPosition, Array[Byte])] = { val keyPrefix = buildKeyPrefix(dataLayer.name) - val keyValuePairs = temporaryVolumeDataStore.findAllConditionalWithKey(key => key.startsWith(keyPrefix)) + val keyValuePairs = temporaryTracingService.getAllVolumeBucketsWithPrefix(keyPrefix) keyValuePairs.flatMap { case (bucketKey, data) => parseBucketKey(bucketKey, dataLayer.additionalAxes).map(tuple => (tuple._2, data)) @@ -301,7 +293,7 @@ class VersionedBucketIterator(prefix: String, with VolumeBucketCompression with BucketKeys with FoxImplicits - with VolumeBucketReversionHelper { + with ReversionHelper { private val batchSize = 64 private var currentStartAfterKey: Option[String] = None @@ -321,7 +313,7 @@ class VersionedBucketIterator(prefix: String, if (currentBatchIterator.hasNext) { val bucket = currentBatchIterator.next() currentStartAfterKey = Some(bucket.key) - if (isRevertedBucket(bucket) || parseBucketKey(bucket.key, additionalAxes).isEmpty) { + if (isRevertedElement(bucket) || parseBucketKey(bucket.key, additionalAxes).isEmpty) { getNextNonRevertedBucket } else { Some(bucket) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala deleted file mode 100644 index 59a915494fa..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ /dev/null @@ -1,302 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings.volume - -import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike, ElementClass} -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto -import com.scalableminds.webknossos.tracingstore.TSRemoteWebknossosClient -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.tracingstore.tracings.{ - FossilDBClient, - KeyValueStoreImplicits, - 
TracingDataStore, - VersionedKeyValuePair -} -import net.liftweb.common.Empty -import com.scalableminds.webknossos.datastore.geometry.{Vec3IntProto => ProtoPoint3D} -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import net.liftweb.common.Box -import play.api.libs.json.{Format, Json} - -import scala.collection.mutable -import scala.concurrent.ExecutionContext -import scala.reflect.ClassTag - -object VolumeTracingDownsampling { - private def magsForVolumeTracingByLayerName(dataSource: DataSourceLike, - fallbackLayerName: Option[String]): List[Vec3Int] = { - val fallbackLayer: Option[DataLayerLike] = - fallbackLayerName.flatMap(name => dataSource.dataLayers.find(_.name == name)) - magsForVolumeTracing(dataSource, fallbackLayer) - } - - def magsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Vec3Int] = { - val fallbackLayerMags = fallbackLayer.map(_.resolutions) - fallbackLayerMags.getOrElse { - val unionOfAllLayers = dataSource.dataLayers.flatMap(_.resolutions).distinct - val unionHasDistinctMaxDims = unionOfAllLayers.map(_.maxDim).distinct.length == unionOfAllLayers.length - if (unionHasDistinctMaxDims) { - unionOfAllLayers - } else { - // If the union of all layer’s mags has conflicting mags (meaning non-distinct maxDims, e.g. 2-2-1 and 2-2-2), - // instead use one layer as template. Use the layer with the most mags. - dataSource.dataLayers.maxBy(_.resolutions.length).resolutions.distinct - } - }.sortBy(_.maxDim) - } -} - -trait VolumeTracingDownsampling - extends BucketKeys - with ProtoGeometryImplicits - with VolumeBucketCompression - with KeyValueStoreImplicits - with FoxImplicits { - - val tracingDataStore: TracingDataStore - val tracingStoreWkRpcClient: TSRemoteWebknossosClient - protected def saveBucket(dataLayer: VolumeTracingLayer, - bucket: BucketPosition, - data: Array[Byte], - version: Long, - toCache: Boolean = false): Fox[Unit] - - protected def updateSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer, - bucketPosition: BucketPosition, - bucketBytes: Array[Byte], - previousBucketBytesBox: Box[Array[Byte]], - elementClass: ElementClassProto, - mappingName: Option[String], - editableMappingTracingId: Option[String]): Fox[Unit] - - protected def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] - - protected def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] - - protected def volumeSegmentIndexClient: FossilDBClient - - protected def downsampleWithLayer(tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - dataLayer: VolumeTracingLayer, - tracingService: VolumeTracingService, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[List[Vec3Int]] = { - val bucketVolume = 32 * 32 * 32 - for { - _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." 
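// The ?~> operator used throughout this file comes from scalableminds' Fox error
// handling; it attaches a readable message to a failure while keeping the cause.
// A rough standalone model using Either (illustrative only; Fox's real API differs):
object ErrorEnrichmentExample {
  implicit class EnrichEither[A](result: Either[String, A]) {
    def ?~>(msg: String): Either[String, A] =
      result.left.map(cause => s"$msg <- $cause")
  }

  val failing: Either[String, Int] = Left("tracing.notFound")
  // Yields Left("Tracing has already been edited. <- tracing.notFound")
  val enriched: Either[String, Int] = failing ?~> "Tracing has already been edited."
}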
- _ <- bool2Fox(tracing.mags.nonEmpty) ?~> "Cannot downsample tracing with no mag list" - sourceMag = getSourceMag(tracing) - magsToCreate <- getMagsToCreate(tracing, oldTracingId) - elementClass = elementClassFromProto(tracing.elementClass) - bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]().withDefault(_ => Array[Byte](0)) - _ = fillMapWithSourceBucketsInplace(bucketDataMapMutable, tracingId, dataLayer, sourceMag) - originalBucketPositions = bucketDataMapMutable.keys.toList - updatedBucketsMutable = new mutable.ListBuffer[BucketPosition]() - _ = magsToCreate.foldLeft(sourceMag) { (previousMag, requiredMag) => - downsampleMagFromMag(previousMag, - requiredMag, - originalBucketPositions, - bucketDataMapMutable, - updatedBucketsMutable, - bucketVolume, - elementClass, - dataLayer) - requiredMag - } - fallbackLayer <- tracingService.getFallbackLayer(oldTracingId) // remote wk does not know the new id yet - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - tracing.version, - tracingService.remoteDatastoreClient, - fallbackLayer, - dataLayer.additionalAxes, - userToken) - _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => - for { - _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) - mappingName <- baseMappingName(tracing) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex( - segmentIndexBuffer, - bucketPosition, - bucketDataMapMutable(bucketPosition), - Empty, - tracing.elementClass, - mappingName, - editableMappingTracingId(tracing, tracingId) - )) - } yield () - } - _ <- segmentIndexBuffer.flush() - _ = logger.debug(s"Downsampled mags $magsToCreate from $sourceMag for volume tracing $tracingId.") - } yield sourceMag :: magsToCreate - } - - private def fillMapWithSourceBucketsInplace(bucketDataMap: mutable.Map[BucketPosition, Array[Byte]], - tracingId: String, - dataLayer: VolumeTracingLayer, - sourceMag: Vec3Int): Unit = { - val data: List[VersionedKeyValuePair[Array[Byte]]] = - tracingDataStore.volumeData.getMultipleKeys(None, Some(tracingId)) - data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => - val bucketPositionOpt = parseBucketKey(keyValuePair.key, dataLayer.additionalAxes).map(_._2) - bucketPositionOpt.foreach { bucketPosition => - if (bucketPosition.mag == sourceMag) { - bucketDataMap(bucketPosition) = decompressIfNeeded(keyValuePair.value, - expectedUncompressedBucketSizeFor(dataLayer), - s"bucket $bucketPosition during downsampling") - } - } - } - } - - private def downsampleMagFromMag(previousMag: Vec3Int, - requiredMag: Vec3Int, - originalBucketPositions: List[BucketPosition], - bucketDataMapMutable: mutable.Map[BucketPosition, Array[Byte]], - updatedBucketsMutable: mutable.ListBuffer[BucketPosition], - bucketVolume: Int, - elementClass: ElementClass.Value, - dataLayer: VolumeTracingLayer): Unit = { - val downScaleFactor = - Vec3Int(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) - downsampledBucketPositions(originalBucketPositions, requiredMag).foreach { downsampledBucketPosition => - val sourceBuckets: Seq[BucketPosition] = - sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) - val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMapMutable(_)) - val downsampledData: Array[Byte] = - if (sourceData.forall(_.sameElements(Array[Byte](0)))) - 
Array[Byte](0) - else { - val sourceDataFilled = fillZeroedIfNeeded(sourceData, bucketVolume, dataLayer.bytesPerElement) - val sourceDataTyped = UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) - val dataDownscaledTyped = - downsampleData(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor, bucketVolume) - UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) - } - bucketDataMapMutable(downsampledBucketPosition) = downsampledData - updatedBucketsMutable += downsampledBucketPosition - } - } - - private def downsampledBucketPositions(originalBucketPositions: List[BucketPosition], - requiredMag: Vec3Int): Set[BucketPosition] = - originalBucketPositions.map { bucketPosition: BucketPosition => - BucketPosition( - (bucketPosition.voxelMag1X / requiredMag.x / 32) * requiredMag.x * 32, - (bucketPosition.voxelMag1Y / requiredMag.y / 32) * requiredMag.y * 32, - (bucketPosition.voxelMag1Z / requiredMag.z / 32) * requiredMag.z * 32, - requiredMag, - bucketPosition.additionalCoordinates - ) - }.toSet - - private def sourceBucketPositionsFor(bucketPosition: BucketPosition, - downScaleFactor: Vec3Int, - previousMag: Vec3Int): Seq[BucketPosition] = - for { - z <- 0 until downScaleFactor.z - y <- 0 until downScaleFactor.y - x <- 0 until downScaleFactor.x - } yield { - BucketPosition( - bucketPosition.voxelMag1X + x * bucketPosition.bucketLength * previousMag.x, - bucketPosition.voxelMag1Y + y * bucketPosition.bucketLength * previousMag.y, - bucketPosition.voxelMag1Z + z * bucketPosition.bucketLength * previousMag.z, - previousMag, - bucketPosition.additionalCoordinates - ) - } - - private def fillZeroedIfNeeded(sourceData: Seq[Array[Byte]], - bucketVolume: Int, - bytesPerElement: Int): Seq[Array[Byte]] = - // Reverted buckets and missing buckets are represented by a single zero-byte. - // For downsampling, those need to be replaced with the full bucket volume of zero-bytes. 
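// Sketch of the zero-byte convention documented in the comment above (names are
// illustrative; compare the ReversionHelper trait earlier in this diff): a reverted
// or missing bucket is stored as the single byte 0 and must be expanded to a full
// zeroed bucket before any voxel-wise processing.
object ZeroFillExample {
  val revertedValue: Array[Byte] = Array[Byte](0)

  def isRevertedElement(data: Array[Byte]): Boolean = data.sameElements(revertedValue)

  def fillZeroedIfNeeded(data: Array[Byte], bucketVolume: Int, bytesPerElement: Int): Array[Byte] =
    if (isRevertedElement(data)) Array.fill[Byte](bucketVolume * bytesPerElement)(0)
    else data
}
// ZeroFillExample.fillZeroedIfNeeded(Array[Byte](0), 32 * 32 * 32, 1).length == 32768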
- sourceData.map { sourceBucketData => - if (sourceBucketData.sameElements(Array[Byte](0))) { - Array.fill[Byte](bucketVolume * bytesPerElement)(0) - } else sourceBucketData - } - - private def downsampleData[T: ClassTag](data: Array[Array[T]], - downScaleFactor: Vec3Int, - bucketVolume: Int): Array[T] = { - val result = new Array[T](bucketVolume) - for { - z <- 0 until 32 - y <- 0 until 32 - x <- 0 until 32 - } { - val voxelSourceData: IndexedSeq[T] = for { - z_offset <- 0 until downScaleFactor.z - y_offset <- 0 until downScaleFactor.y - x_offset <- 0 until downScaleFactor.x - } yield { - val sourceVoxelPosition = - Vec3Int(x * downScaleFactor.x + x_offset, y * downScaleFactor.y + y_offset, z * downScaleFactor.z + z_offset) - val sourceBucketPosition = - Vec3Int(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) - val sourceVoxelPositionInSourceBucket = - Vec3Int(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) - val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z - val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 - data(sourceBucketIndex)(sourceVoxelIndex) - } - result(x + y * 32 + z * 32 * 32) = mode(voxelSourceData) - } - result - } - - private def mode[T](items: Seq[T]): T = - items.groupBy(i => i).view.mapValues(_.size).maxBy(_._2)._1 - - private def getSourceMag(tracing: VolumeTracing): Vec3Int = - tracing.mags.minBy(_.maxDim) - - private def getMagsToCreate(tracing: VolumeTracing, oldTracingId: String): Fox[List[Vec3Int]] = - for { - requiredMags <- getRequiredMags(tracing, oldTracingId) - sourceMag = getSourceMag(tracing) - magsToCreate = requiredMags.filter(_.maxDim > sourceMag.maxDim) - } yield magsToCreate - - private def getRequiredMags(tracing: VolumeTracing, oldTracingId: String): Fox[List[Vec3Int]] = - for { - dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSourceForTracing(oldTracingId) - magsForTracing = VolumeTracingDownsampling.magsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) - } yield magsForTracing.sortBy(_.maxDim) - - protected def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { - val tracingMags = - resolveLegacyMagList(tracing.mags) - val allowedMags = magRestrictions.filterAllowed(tracingMags.map(vec3IntFromProto)) - tracing.withMags(allowedMags.map(vec3IntToProto)) - } - - protected def resolveLegacyMagList(mags: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = - if (mags.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else mags -} - -object MagRestrictions { - def empty: MagRestrictions = MagRestrictions(None, None) - implicit val jsonFormat: Format[MagRestrictions] = Json.format[MagRestrictions] -} - -case class MagRestrictions( - min: Option[Int], - max: Option[Int] -) { - def filterAllowed(mags: Seq[Vec3Int]): Seq[Vec3Int] = - mags.filter(isAllowed) - - def isAllowed(mag: Vec3Int): Boolean = - min.getOrElse(0) <= mag.maxDim && max.getOrElse(Int.MaxValue) >= mag.maxDim - - def isForbidden(mag: Vec3Int): Boolean = !isAllowed(mag) - - def minStr: Option[String] = min.map(_.toString) - def maxStr: Option[String] = max.map(_.toString) -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 534ca0ada77..f1e88112b3d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -1,25 +1,22 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, MagLocator} +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.tracingstore.tracings.{ - FossilDBClient, - TemporaryTracingStore, - TemporaryVolumeDataStore -} +import com.scalableminds.webknossos.tracingstore.tracings.{FossilDBClient, TemporaryTracingService} import sun.reflect.generics.reflectiveObjects.NotImplementedException +import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext -import ucar.ma2.{Array => MultiArray} trait AbstractVolumeTracingBucketProvider extends BucketProvider with VolumeTracingBucketHelper with FoxImplicits { @@ -32,7 +29,7 @@ class VolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit val ec: Ex extends AbstractVolumeTracingBucketProvider { val volumeDataStore: FossilDBClient = layer.volumeDataStore - val temporaryVolumeDataStore: TemporaryVolumeDataStore = layer.volumeDataCache + val temporaryTracingService: TemporaryTracingService = layer.temporaryTracingService override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = loadBucket(layer, readInstruction.bucket, readInstruction.version) @@ -48,20 +45,14 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit v extends AbstractVolumeTracingBucketProvider { val volumeDataStore: FossilDBClient = layer.volumeDataStore - val temporaryVolumeDataStore: TemporaryVolumeDataStore = layer.volumeDataCache - val temporaryTracingStore: TemporaryTracingStore[VolumeTracing] = layer.temporaryTracingStore + val temporaryTracingService: TemporaryTracingService = layer.temporaryTracingService override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = for { - _ <- assertTracingStillInCache(layer) + _ <- temporaryTracingService.assertTracingStillPresent(layer.name) data <- loadBucket(layer, readInstruction.bucket, readInstruction.version) } yield data - private def assertTracingStillInCache(layer: VolumeTracingLayer)(implicit ec: ExecutionContext): Fox[Unit] = - for { - _ <- bool2Fox(temporaryTracingStore.contains(layer.name)) ?~> 
"Temporary Volume Tracing expired" - } yield () - override def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = bucketStreamFromTemporaryStore(layer) @@ -72,15 +63,14 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit v case class VolumeTracingLayer( name: String, volumeTracingService: VolumeTracingService, + temporaryTracingService: TemporaryTracingService, isTemporaryTracing: Boolean = false, includeFallbackDataIfAvailable: Boolean = false, tracing: VolumeTracing, - userToken: Option[String], - additionalAxes: Option[Seq[AdditionalAxis]] -)(implicit val volumeDataStore: FossilDBClient, - implicit val volumeDataCache: TemporaryVolumeDataStore, - implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], - implicit val ec: ExecutionContext) + tokenContext: TokenContext, + additionalAxes: Option[Seq[AdditionalAxis]], + volumeDataStore: FossilDBClient, +)(implicit val ec: ExecutionContext) extends SegmentationLayer with ProtoGeometryImplicits { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala new file mode 100644 index 00000000000..1c7f316bb39 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala @@ -0,0 +1,57 @@ +package com.scalableminds.webknossos.tracingstore.tracings.volume + +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike} +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.geometry.{Vec3IntProto => ProtoPoint3D} +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import play.api.libs.json.{Format, Json} + +object VolumeTracingMags extends ProtoGeometryImplicits { + + def magsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Vec3Int] = { + val fallbackLayerMags = fallbackLayer.map(_.resolutions) + fallbackLayerMags.getOrElse { + val unionOfAllLayers = dataSource.dataLayers.flatMap(_.resolutions).distinct + val unionHasDistinctMaxDims = unionOfAllLayers.map(_.maxDim).distinct.length == unionOfAllLayers.length + if (unionHasDistinctMaxDims) { + unionOfAllLayers + } else { + // If the union of all layer’s mags has conflicting mags (meaning non-distinct maxDims, e.g. 2-2-1 and 2-2-2), + // instead use one layer as template. Use the layer with the most mags. 
+ dataSource.dataLayers.maxBy(_.resolutions.length).resolutions.distinct + } + }.sortBy(_.maxDim) + } + + def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { + val tracingMags = + resolveLegacyMagList(tracing.mags) + val allowedMags = magRestrictions.filterAllowed(tracingMags.map(vec3IntFromProto)) + tracing.withMags(allowedMags.map(vec3IntToProto)) + } + + def resolveLegacyMagList(mags: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = + if (mags.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else mags +} + +object MagRestrictions { + def empty: MagRestrictions = MagRestrictions(None, None) + implicit val jsonFormat: Format[MagRestrictions] = Json.format[MagRestrictions] +} + +case class MagRestrictions( + min: Option[Int], + max: Option[Int] +) { + def filterAllowed(mags: Seq[Vec3Int]): Seq[Vec3Int] = + mags.filter(isAllowed) + + def isAllowed(mag: Vec3Int): Boolean = + min.getOrElse(0) <= mag.maxDim && max.getOrElse(Int.MaxValue) >= mag.maxDim + + def isForbidden(mag: Vec3Int): Boolean = !isAllowed(mag) + + def minStr: Option[String] = min.map(_.toString) + def maxStr: Option[String] = max.map(_.toString) +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index f78c53cd0b8..3cf76c3a36a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -1,73 +1,55 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataRequestCollection +import com.scalableminds.webknossos.datastore.models._ import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer, ElementClass} -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest -import com.scalableminds.webknossos.datastore.models.{ - AdditionalCoordinate, - BucketPosition, - UnsignedInteger, - UnsignedIntegerArray, - VoxelSize, - WebknossosAdHocMeshRequest -} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat -import 
com.scalableminds.webknossos.tracingstore.{ - TSRemoteDatastoreClient, - TSRemoteWebknossosClient, - TracingStoreRedisStore -} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files import play.api.libs.Files.TemporaryFileCreator -import play.api.libs.json.{JsObject, JsValue, Json} import java.io._ import java.nio.file.Paths +import java.util.Base64 import java.util.zip.Deflater import scala.collection.mutable import scala.concurrent.ExecutionContext import scala.concurrent.duration._ class VolumeTracingService @Inject()( - val tracingDataStore: TracingDataStore, - val tracingStoreWkRpcClient: TSRemoteWebknossosClient, - val adHocMeshServiceHolder: AdHocMeshServiceHolder, - implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], - implicit val temporaryVolumeDataStore: TemporaryVolumeDataStore, - implicit val ec: ExecutionContext, - val handledGroupIdStore: TracingStoreRedisStore, - val uncommittedUpdatesStore: TracingStoreRedisStore, - editableMappingService: EditableMappingService, - val temporaryTracingIdStore: TracingStoreRedisStore, + tracingDataStore: TracingDataStore, + adHocMeshServiceHolder: AdHocMeshServiceHolder, + temporaryFileCreator: TemporaryFileCreator, + volumeSegmentIndexService: VolumeSegmentIndexService, + val temporaryTracingService: TemporaryTracingService, val remoteDatastoreClient: TSRemoteDatastoreClient, - val remoteWebknossosClient: TSRemoteWebknossosClient, - val temporaryFileCreator: TemporaryFileCreator, - val tracingMigrationService: VolumeTracingMigrationService, - volumeSegmentIndexService: VolumeSegmentIndexService -) extends TracingService[VolumeTracing] - with VolumeTracingBucketHelper - with VolumeTracingDownsampling + val remoteWebknossosClient: TSRemoteWebknossosClient +)(implicit val ec: ExecutionContext) + extends VolumeTracingBucketHelper with WKWDataFormatHelper with FallbackDataHelper with DataFinder + with ColorGenerator + with BoundingBoxMerger with VolumeDataZipHelper with ProtoGeometryImplicits with FoxImplicits @@ -77,9 +59,6 @@ class VolumeTracingService @Inject()( implicit val tracingCompanion: VolumeTracing.type = VolumeTracing - implicit val updateActionJsonFormat: VolumeUpdateAction.volumeUpdateActionFormat.type = - VolumeUpdateAction.volumeUpdateActionFormat - val tracingType: TracingType = TracingType.volume val tracingStore: FossilDBClient = tracingDataStore.volumes @@ -93,14 +72,22 @@ class VolumeTracingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - private val fallbackLayerCache: AlfuCache[String, Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) + private val fallbackLayerCache: AlfuCache[(String, Option[String], Option[String]), Option[RemoteFallbackLayer]] = + AlfuCache(maxCapacity = 100) - override def currentVersion(tracingId: String): Fox[Long] = - tracingDataStore.volumes.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - - override def currentVersion(tracing: VolumeTracing): Long = tracing.version + def saveVolume(tracing: VolumeTracing, + tracingId: Option[String], + version: Long, + toTemporaryStore: Boolean = false): Fox[String] = { + val id = tracingId.getOrElse(TracingId.generate) + if 
(toTemporaryStore) { + temporaryTracingService.saveVolume(id, tracing).map(_ => id) + } else { + tracingDataStore.volumes.put(id, version, tracing).map(_ => id) + } + } - override protected def updateSegmentIndex( + private def updateSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, bucketPosition: BucketPosition, bucketBytes: Array[Byte], @@ -116,75 +103,44 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" - def handleUpdateGroup(tracingId: String, - updateGroup: UpdateActionGroup[VolumeTracing], - previousVersion: Long, - userToken: Option[String]): Fox[Unit] = + def applyBucketMutatingActions(tracingId: String, + tracing: VolumeTracing, + updateActions: List[BucketMutatingVolumeUpdateAction], + newVersion: Long)(implicit tc: TokenContext): Fox[Unit] = for { // warning, may be called multiple times with the same version number (due to transaction management). // frontend ensures that each bucket is only updated once per transaction - fallbackLayer <- getFallbackLayer(tracingId) - tracing <- find(tracingId) ?~> "tracing.notFound" - segmentIndexBuffer <- Fox.successful( - new VolumeSegmentIndexBuffer( - tracingId, - volumeSegmentIndexClient, - updateGroup.version, - remoteDatastoreClient, - fallbackLayer, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken - )) - updatedTracing: VolumeTracing <- updateGroup.actions.foldLeft(find(tracingId)) { (tracingFox, action) => - tracingFox.futureBox.flatMap { - case Full(tracing) => - action match { - case a: UpdateBucketVolumeAction => - if (tracing.getHasEditableMapping) { - Fox.failure("Cannot mutate volume data in annotation with editable mapping.") - } else - updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to save volume data." - case a: UpdateTracingVolumeAction => - Fox.successful( - tracing.copy( - activeSegmentId = Some(a.activeSegmentId), - editPosition = a.editPosition, - editRotation = a.editRotation, - largestSegmentId = a.largestSegmentId, - zoomLevel = a.zoomLevel, - editPositionAdditionalCoordinates = - AdditionalCoordinate.toProto(a.editPositionAdditionalCoordinates) - )) - case a: RevertToVersionVolumeAction => - revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) - case a: DeleteSegmentDataVolumeAction => - if (!tracing.getHasSegmentIndex) { - Fox.failure("Cannot delete segment data for annotations without segment index.") - } else - deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." - case _: UpdateTdCamera => Fox.successful(tracing) - case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) - case _ => Fox.failure("Unknown action.") - } - case Empty => - Fox.empty - case f: Failure => - f.toFox - } + fallbackLayerOpt <- getFallbackLayer(tracingId, tracing) + segmentIndexBuffer = new VolumeSegmentIndexBuffer( + tracingId, + volumeSegmentIndexClient, + newVersion, + remoteDatastoreClient, + fallbackLayerOpt, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + tc + ) + _ <- Fox.serialCombined(updateActions) { + case a: UpdateBucketVolumeAction => + if (tracing.getHasEditableMapping) { + Fox.failure("Cannot mutate volume data in annotation with editable mapping.") + } else + updateBucket(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to save volume data." 
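// With the unified versioning, UpdateBucketVolumeAction carries its bucket payload
// base64-encoded (see action.base64Data just below). Decoding is plain
// java.util.Base64; this standalone round-trip is illustrative, not project code.
object BucketPayloadExample {
  import java.util.Base64

  def decodeBucketPayload(base64Data: String): Array[Byte] =
    Base64.getDecoder.decode(base64Data)

  // 32^3 zero bytes survive an encode/decode round trip unchanged.
  val original: Array[Byte] = Array.fill[Byte](32 * 32 * 32)(0)
  val decoded: Array[Byte] = decodeBucketPayload(Base64.getEncoder.encodeToString(original))
}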
+ case a: DeleteSegmentDataVolumeAction => + if (!tracing.getHasSegmentIndex) { + Fox.failure("Cannot delete segment data for annotations without segment index.") + } else + deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to delete segment data." + case _ => Fox.failure("Unknown bucket-mutating action.") } _ <- segmentIndexBuffer.flush() - _ <- save(updatedTracing.copy(version = updateGroup.version), Some(tracingId), updateGroup.version) - _ <- tracingDataStore.volumeUpdates.put( - tracingId, - updateGroup.version, - updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp)).map(_.transformToCompact)) - } yield Fox.successful(()) + } yield () private def updateBucket(tracingId: String, volumeTracing: VolumeTracing, action: UpdateBucketVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - updateGroupVersion: Long): Fox[VolumeTracing] = + updateGroupVersion: Long)(implicit tc: TokenContext): Fox[VolumeTracing] = for { _ <- assertMagIsValid(volumeTracing, action.mag) ?~> s"Received a mag-${action.mag.toMagLiteral(allowScalar = true)} bucket, which is invalid for this annotation." bucketPosition = BucketPosition(action.position.x, @@ -194,15 +150,16 @@ class VolumeTracingService @Inject()( action.additionalCoordinates) _ <- bool2Fox(!bucketPosition.hasNegativeComponent) ?~> s"Received a bucket at negative position ($bucketPosition), must be positive" dataLayer = volumeTracingLayer(tracingId, volumeTracing) - _ <- saveBucket(dataLayer, bucketPosition, action.data, updateGroupVersion) ?~> "failed to save bucket" - mappingName <- baseMappingName(volumeTracing) + actionBucketData <- action.base64Data.map(Base64.getDecoder.decode).toFox + _ <- saveBucket(dataLayer, bucketPosition, actionBucketData, updateGroupVersion) ?~> "failed to save bucket" + mappingName <- selectMappingName(volumeTracing) _ <- Fox.runIfOptionTrue(volumeTracing.hasSegmentIndex) { for { previousBucketBytes <- loadBucket(dataLayer, bucketPosition, Some(updateGroupVersion - 1L)).futureBox _ <- updateSegmentIndex( segmentIndexBuffer, bucketPosition, - action.data, + actionBucketData, previousBucketBytes, volumeTracing.elementClass, mappingName, @@ -212,35 +169,34 @@ class VolumeTracingService @Inject()( } } yield volumeTracing.copy(volumeBucketDataHasChanged = Some(true)) - override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = + def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None - override def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] = + private def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] = if (tracing.getHasEditableMapping) - tracing.mappingName.map(editableMappingService.getBaseMappingName).getOrElse(Fox.successful(None)) + Fox.failure("mappingName called on volumeTracing with editableMapping!") else Fox.successful(tracing.mappingName) private def deleteSegmentData(tracingId: String, volumeTracing: VolumeTracing, a: DeleteSegmentDataVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - version: Long, - userToken: Option[String]): Fox[VolumeTracing] = + version: Long)(implicit tc: TokenContext): Fox[VolumeTracing] = for { _ <- Fox.successful(()) dataLayer = volumeTracingLayer(tracingId, volumeTracing) + fallbackLayer <- getFallbackLayer(tracingId, volumeTracing) possibleAdditionalCoordinates = AdditionalAxis.coordinateSpace(dataLayer.additionalAxes).map(Some(_)) additionalCoordinateList = 
if (possibleAdditionalCoordinates.isEmpty) { List(None) } else { possibleAdditionalCoordinates.toList } - mappingName <- baseMappingName(volumeTracing) + mappingName <- selectMappingName(volumeTracing) _ <- Fox.serialCombined(volumeTracing.mags.toList)(magProto => Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(magProto) for { - fallbackLayer <- getFallbackLayer(tracingId) bucketPositionsRaw <- volumeSegmentIndexService.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, tracingId, @@ -250,8 +206,7 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId(volumeTracing, tracingId), additionalCoordinates, - dataLayer.additionalAxes, - userToken + dataLayer.additionalAxes ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -290,34 +245,35 @@ class VolumeTracingService @Inject()( bool2Fox(mag.isIsotropic) } - private def revertToVolumeVersion(tracingId: String, - sourceVersion: Long, - newVersion: Long, - tracing: VolumeTracing, - userToken: Option[String]): Fox[VolumeTracing] = { + def revertVolumeData(tracingId: String, + sourceVersion: Long, + sourceTracing: VolumeTracing, + newVersion: Long, + tracingBeforeRevert: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { + val before = Instant.now - val dataLayer = volumeTracingLayer(tracingId, tracing) - val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() + val dataLayer = volumeTracingLayer(tracingId, tracingBeforeRevert) + val bucketStreamBeforeRevert = + dataLayer.volumeBucketProvider.bucketStreamWithVersion(version = Some(tracingBeforeRevert.version)) for { - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracingBeforeRevert) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - userToken) - sourceTracing <- find(tracingId, Some(sourceVersion)) - mappingName <- baseMappingName(sourceTracing) - _ <- Fox.serialCombined(bucketStream) { + tc) + mappingName <- selectMappingName(sourceTracing) + _ <- Fox.serialCombined(bucketStreamBeforeRevert) { case (bucketPosition, dataBeforeRevert, version) => if (version > sourceVersion) { loadBucket(dataLayer, bucketPosition, Some(sourceVersion)).futureBox.map { case Full(dataAfterRevert) => for { _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- Fox.runIfOptionTrue(tracingBeforeRevert.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -330,9 +286,9 @@ class VolumeTracingService @Inject()( } yield () case Empty => for { - dataAfterRevert <- Fox.successful(Array[Byte](0)) + dataAfterRevert <- Fox.successful(revertedValue) _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- Fox.runIfOptionTrue(tracingBeforeRevert.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -348,13 +304,15 @@ class VolumeTracingService @Inject()( } else Fox.successful(()) } _ <- segmentIndexBuffer.flush() - } yield sourceTracing + _ = Instant.logSince( + before, + s"Reverting volume data of $tracingId from v${tracingBeforeRevert.version} to v$sourceVersion, creating v$newVersion") + } yield () } - def initializeWithDataMultiple(tracingId: String, - tracing: VolumeTracing, - initialData: File, - userToken: Option[String])(implicit mp: 
MessagesProvider): Fox[Set[Vec3Int]] = + def initializeWithDataMultiple(tracingId: String, tracing: VolumeTracing, initialData: File)( + implicit mp: MessagesProvider, + tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) Failure("Tracing has already been edited.") else { @@ -367,7 +325,7 @@ class VolumeTracingService @Inject()( _ = if (magSet.nonEmpty) magSets.add(magSet) } yield () } - mappingName <- baseMappingName(tracing) + mappingName <- selectMappingName(tracing) mags <- // if none of the tracings contained any volume data do not save buckets, use full mag list, as already initialized on wk-side if (magSets.isEmpty) @@ -391,7 +349,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) destinationDataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -399,7 +357,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bytes) => for { @@ -424,16 +382,15 @@ class VolumeTracingService @Inject()( def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File, - magRestrictions: MagRestrictions, - userToken: Option[String]): Fox[Set[Vec3Int]] = + magRestrictions: MagRestrictions)(implicit tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) { Failure("Tracing has already been edited.") } else { val dataLayer = volumeTracingLayer(tracingId, tracing) val savedMags = new mutable.HashSet[Vec3Int]() for { - fallbackLayer <- getFallbackLayer(tracingId) - mappingName <- baseMappingName(tracing) + fallbackLayer <- getFallbackLayer(tracingId, tracing) + mappingName <- selectMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -441,7 +398,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- withBucketsFromZip(initialData) { (bucketPosition, bytes) => if (magRestrictions.isForbidden(bucketPosition.mag)) { @@ -471,10 +428,11 @@ class VolumeTracingService @Inject()( } } - def allDataZip(tracingId: String, - tracing: VolumeTracing, - volumeDataZipFormat: VolumeDataZipFormat, - voxelSize: Option[VoxelSize])(implicit ec: ExecutionContext): Fox[Files.TemporaryFile] = { + def allDataZip( + tracingId: String, + tracing: VolumeTracing, + volumeDataZipFormat: VolumeDataZipFormat, + voxelSize: Option[VoxelSize])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Files.TemporaryFile] = { val zipped = temporaryFileCreator.create(tracingId, ".zip") val os = new BufferedOutputStream(new FileOutputStream(new File(zipped.path.toString))) allDataToOutputStream(tracingId, tracing, volumeDataZipFormat, voxelSize, os).map(_ => zipped) @@ -484,7 +442,7 @@ class VolumeTracingService @Inject()( tracing: VolumeTracing, volumeDataZipFormmat: VolumeDataZipFormat, voxelSize: Option[VoxelSize], - os: OutputStream)(implicit ec: ExecutionContext): Fox[Unit] = { + os: OutputStream)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val buckets: Iterator[NamedStream] = volumeDataZipFormmat match { case VolumeDataZipFormat.wkw => @@ -509,96 +467,96 @@ class 
VolumeTracingService @Inject()( zipResult } - def isTemporaryTracing(tracingId: String): Fox[Boolean] = - temporaryTracingIdStore.contains(temporaryIdKey(tracingId)) - def data(tracingId: String, tracing: VolumeTracing, dataRequests: DataRequestCollection, - includeFallbackDataIfAvailable: Boolean = false, - userToken: Option[String] = None): Fox[(Array[Byte], List[Int])] = + includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { - isTemporaryTracing <- isTemporaryTracing(tracingId) - dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable, userToken) + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable) requests = dataRequests.map(r => DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleDataRequests(requests) } yield data - def duplicate(tracingId: String, - sourceTracing: VolumeTracing, - fromTask: Boolean, - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox], - mappingName: Option[String], - userToken: Option[String]): Fox[(String, VolumeTracing)] = { - val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) - val tracingWithMagRestrictions = restrictMagList(tracingWithBB, magRestrictions) + def adaptVolumeForDuplicate(sourceTracingId: String, + newTracingId: String, + sourceTracing: VolumeTracing, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[VolumeTracing] = { + val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, datasetBoundingBox) + val tracingWithMagRestrictions = VolumeTracingMags.restrictMagList(tracingWithBB, magRestrictions) for { - fallbackLayer <- getFallbackLayer(tracingId) - hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer, userToken) + fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) + hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) newTracing = tracingWithMagRestrictions.copy( createdTimestamp = System.currentTimeMillis(), - editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithMagRestrictions.editPosition), - editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithMagRestrictions.editRotation), - boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithMagRestrictions.boundingBox), - mappingName = mappingName.orElse(tracingWithMagRestrictions.mappingName), - version = 0, + editPosition = editPosition.map(vec3IntToProto).getOrElse(sourceTracing.editPosition), + editRotation = editRotation.map(vec3DoubleToProto).getOrElse(sourceTracing.editRotation), + boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(sourceTracing.boundingBox), + mappingName = + if (sourceTracing.getHasEditableMapping) Some(newTracingId) + else sourceTracing.mappingName, + version = newVersion, // Adding segment index on duplication if the volume tracing allows it. 
This will be used in duplicateData hasSegmentIndex = Some(hasSegmentIndex) ) _ <- bool2Fox(newTracing.mags.nonEmpty) ?~> "magRestrictions.tooTight" - newId <- save(newTracing, None, newTracing.version) - _ <- duplicateData(tracingId, sourceTracing, newId, newTracing, userToken) - } yield (newId, newTracing) + } yield newTracing } - @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand private def addBoundingBoxFromTaskIfRequired(tracing: VolumeTracing, - fromTask: Boolean, - datasetBoundingBox: Option[BoundingBox]): VolumeTracing = - if (fromTask && datasetBoundingBox.isDefined) { - val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 - tracing - .addUserBoundingBoxes( - NamedBoundingBoxProto(newId, - Some("task bounding box"), - Some(true), - Some(getRandomColor), - tracing.boundingBox)) - .withBoundingBox(datasetBoundingBox.get) - } else tracing + isFromTask: Boolean, + datasetBoundingBoxOpt: Option[BoundingBox]): VolumeTracing = + datasetBoundingBoxOpt match { + case Some(datasetBoundingBox) if isFromTask => + val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 + tracing + .addUserBoundingBoxes( + NamedBoundingBoxProto(newId, + Some("task bounding box"), + Some(true), + Some(getRandomColor), + tracing.boundingBox)) + .withBoundingBox(datasetBoundingBox) + case _ => tracing + } - private def duplicateData(sourceId: String, - sourceTracing: VolumeTracing, - destinationId: String, - destinationTracing: VolumeTracing, - userToken: Option[String]): Fox[Unit] = + def duplicateVolumeData(sourceTracingId: String, + sourceTracing: VolumeTracing, + newTracingId: String, + newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { + var bucketCount = 0 + val before = Instant.now for { - isTemporaryTracing <- isTemporaryTracing(sourceId) - sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - fallbackLayer <- getFallbackLayer(sourceId) + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(sourceTracingId) + sourceDataLayer = volumeTracingLayer(sourceTracingId, sourceTracing, isTemporaryTracing) + buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream( + Some(sourceTracing.version)) + destinationDataLayer = volumeTracingLayer(newTracingId, newTracing) + fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( - destinationId, + newTracingId, volumeSegmentIndexClient, - destinationTracing.version, + newTracing.version, remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), - userToken + tc ) - mappingName <- baseMappingName(sourceTracing) + mappingName <- selectMappingName(sourceTracing) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => - if (destinationTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { + if (newTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { for { - _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, destinationTracing.version) - _ <- Fox.runIfOptionTrue(destinationTracing.hasSegmentIndex)( + _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, newTracing.version) + _ = bucketCount += 1 + _ <- 
Fox.runIfOptionTrue(newTracing.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -606,90 +564,59 @@ class VolumeTracingService @Inject()( Empty, sourceTracing.elementClass, mappingName, - editableMappingTracingId(sourceTracing, sourceId) + editableMappingTracingId(sourceTracing, sourceTracingId) )) } yield () } else Fox.successful(()) } + _ = Instant.logSince( + before, + s"Duplicating $bucketCount volume buckets from $sourceTracingId v${sourceTracing.version} to $newTracingId v${newTracing.version}.") _ <- segmentIndexBuffer.flush() } yield () + } - private def volumeTracingLayer(tracingId: String, - tracing: VolumeTracing, - isTemporaryTracing: Boolean = false, - includeFallbackDataIfAvailable: Boolean = false, - userToken: Option[String] = None): VolumeTracingLayer = + private def volumeTracingLayer( + tracingId: String, + tracing: VolumeTracing, + isTemporaryTracing: Boolean = false, + includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): VolumeTracingLayer = VolumeTracingLayer( name = tracingId, isTemporaryTracing = isTemporaryTracing, volumeTracingService = this, + temporaryTracingService = this.temporaryTracingService, + volumeDataStore = volumeDataStore, includeFallbackDataIfAvailable = includeFallbackDataIfAvailable, tracing = tracing, - userToken = userToken, + tokenContext = tc, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) - def updateActionLog(tracingId: String, - newestVersion: Option[Long] = None, - oldestVersion: Option[Long] = None): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[CompactVolumeUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - - for { - volumeTracings <- tracingDataStore.volumeUpdates.getMultipleVersionsAsVersionValueTuple( - tracingId, - newestVersion, - oldestVersion)(fromJsonBytes[List[CompactVolumeUpdateAction]]) - updateActionGroupsJs = volumeTracings.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - - def updateMagList(tracingId: String, - tracing: VolumeTracing, - mags: Set[Vec3Int], - toCache: Boolean = false): Fox[String] = + def updateMagList(tracingId: String, tracing: VolumeTracing, mags: Set[Vec3Int]): Fox[String] = for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." - _ <- bool2Fox(mags.nonEmpty) ?~> "Mag restrictions result in zero mags" - id <- save(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), - Some(tracingId), - tracing.version, - toCache) + _ <- bool2Fox(mags.nonEmpty) ?~> "Initializing without any mags. No data or mag restrictions too tight?" 
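// The mag list is sorted and filtered by maxDim here and in VolumeTracingMags. A
// standalone sketch of the MagRestrictions logic (Mag stands in for Vec3Int, and
// maxDim is assumed to be the largest component, as sortBy(_.maxDim) suggests):
object MagRestrictionsExample {
  final case class Mag(x: Int, y: Int, z: Int) {
    def maxDim: Int = math.max(x, math.max(y, z))
  }

  def isAllowed(mag: Mag, min: Option[Int], max: Option[Int]): Boolean =
    min.getOrElse(0) <= mag.maxDim && max.getOrElse(Int.MaxValue) >= mag.maxDim

  val mags = Seq(Mag(1, 1, 1), Mag(2, 2, 1), Mag(4, 4, 2), Mag(8, 8, 4))

  // Restricting to maxDim in [2, 4] keeps Mag(2,2,1) and Mag(4,4,2).
  val filtered: Seq[Mag] = mags.filter(m => isAllowed(m, min = Some(2), max = Some(4)))
}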
+ id <- saveVolume(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), + Some(tracingId), + tracing.version) } yield id - def downsample(tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - userToken: Option[String]): Fox[Unit] = - for { - resultingMags <- downsampleWithLayer(tracingId, - oldTracingId, - tracing, - volumeTracingLayer(tracingId, tracing), - this, - userToken) - _ <- updateMagList(tracingId, tracing, resultingMags.toSet) - } yield () - def volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit = Some(1))(toBox).isEmpty - def createAdHocMesh(tracingId: String, - request: WebknossosAdHocMeshRequest, - userToken: Option[String]): Fox[(Array[Float], List[Int])] = + def createAdHocMesh(tracingId: String, tracing: VolumeTracing, request: WebknossosAdHocMeshRequest)( + implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = for { - tracing <- find(tracingId) ?~> "tracing.notFound" - segmentationLayer = volumeTracingLayer(tracingId, - tracing, - includeFallbackDataIfAvailable = true, - userToken = userToken) + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + volumeLayer = volumeTracingLayer(tracingId, + tracing, + includeFallbackDataIfAvailable = true, + isTemporaryTracing = isTemporaryTracing) adHocMeshRequest = AdHocMeshRequest( None, - segmentationLayer, - request.cuboid(segmentationLayer), + volumeLayer, + request.cuboid(volumeLayer), request.segmentId, request.voxelSizeFactorInUnit, None, @@ -700,10 +627,11 @@ class VolumeTracingService @Inject()( result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result - def findData(tracingId: String): Fox[Option[Vec3Int]] = + def findData(tracingId: String, tracing: VolumeTracing)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = for { - tracing <- find(tracingId) ?~> "tracing.notFound" - volumeLayer = volumeTracingLayer(tracingId, tracing) + _ <- Fox.successful(()) + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + volumeLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing = isTemporaryTracing) bucketStream = volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { val bucket = bucketStream.next() @@ -716,7 +644,8 @@ class VolumeTracingService @Inject()( def merge(tracings: Seq[VolumeTracing], mergedVolumeStats: MergedVolumeStats, - newEditableMappingIdOpt: Option[String]): Box[VolumeTracing] = { + newEditableMappingIdOpt: Option[String], + newVersion: Long): Box[VolumeTracing] = { def mergeTwoWithStats(tracingAWithIndex: Box[(VolumeTracing, Int)], tracingBWithIndex: Box[(VolumeTracing, Int)]): Box[(VolumeTracing, Int)] = for { @@ -732,7 +661,7 @@ class VolumeTracingService @Inject()( } yield tracing.copy( createdTimestamp = System.currentTimeMillis(), - version = 0L, + version = newVersion, mappingName = newEditableMappingIdOpt, hasSegmentIndex = Some(mergedVolumeStats.createdSegmentIndex) ) @@ -786,25 +715,26 @@ class VolumeTracingService @Inject()( case (None, None) => None } - private def bucketStreamFromSelector(selector: TracingSelector, - tracing: VolumeTracing): Iterator[(BucketPosition, Array[Byte])] = { - val dataLayer = volumeTracingLayer(selector.tracingId, tracing) + private def bucketStreamFor(tracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): Iterator[(BucketPosition, Array[Byte])] = { + val dataLayer = volumeTracingLayer(tracingId, tracing) 
    dataLayer.bucketProvider.bucketStream(Some(tracing.version))
  }

-  def mergeVolumeData(tracingSelectors: Seq[TracingSelector],
-                      tracings: Seq[VolumeTracing],
-                      newId: String,
-                      newVersion: Long,
-                      toCache: Boolean,
-                      userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] = {
+  def mergeVolumeData(
+      tracingIds: Seq[String],
+      tracings: Seq[VolumeTracing],
+      newId: String,
+      newVersion: Long,
+      toTemporaryStore: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = {
+    val before = Instant.now
    val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8))
    val magSets = new mutable.HashSet[Set[Vec3Int]]()
-    tracingSelectors.zip(tracings).foreach {
-      case (selector, tracing) =>
+    tracingIds.zip(tracings).foreach {
+      case (tracingId, tracing) =>
        val magSet = new mutable.HashSet[Vec3Int]()
-        bucketStreamFromSelector(selector, tracing).foreach {
+        bucketStreamFor(tracingId, tracing).foreach {
          case (bucketPosition, _) =>
            magSet.add(bucketPosition.mag)
        }
@@ -815,9 +745,6 @@ class VolumeTracingService @Inject()(
    val shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings)
-    logger.info(
-      s"Merging ${tracings.length} volume tracings into new $newId. CreateSegmentIndex = $shouldCreateSegmentIndex")
-
    // If none of the tracings contained any volume data, do not save buckets and do not touch the mag list
    if (magSets.isEmpty)
      Fox.successful(MergedVolumeStats.empty(shouldCreateSegmentIndex))
@@ -830,15 +757,15 @@ class VolumeTracingService @Inject()(
      val mergedVolume = new MergedVolume(elementClass)
-      tracingSelectors.zip(tracings).foreach {
-        case (selector, tracing) =>
-          val bucketStream = bucketStreamFromSelector(selector, tracing)
+      tracingIds.zip(tracings).foreach {
+        case (tracingId, tracing) =>
+          val bucketStream = bucketStreamFor(tracingId, tracing)
          mergedVolume.addLabelSetFromBucketStream(bucketStream, magsIntersection)
      }
-      tracingSelectors.zip(tracings).zipWithIndex.foreach {
-        case ((selector, tracing), sourceVolumeIndex) =>
-          val bucketStream = bucketStreamFromSelector(selector, tracing)
+      tracingIds.zip(tracings).zipWithIndex.foreach {
+        case ((tracingId, tracing), sourceVolumeIndex) =>
+          val bucketStream = bucketStreamFor(tracingId, tracing)
          mergedVolume.addFromBucketStream(sourceVolumeIndex, bucketStream, Some(magsIntersection))
      }
      for {
@@ -848,17 +775,25 @@ class VolumeTracingService @Inject()(
                                                                       elementClass)
        mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t =>
          AdditionalAxis.fromProtosAsOpt(t.additionalAxes))))
-        fallbackLayer <- getFallbackLayer(tracingSelectors.head.tracingId)
+        firstTracingId <- tracingIds.headOption ?~> "merge.noTracings"
+        firstTracing <- tracings.headOption ?~> "merge.noTracings"
+        fallbackLayer <- getFallbackLayer(firstTracingId, firstTracing)
        segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId,
                                                          volumeSegmentIndexClient,
                                                          newVersion,
                                                          remoteDatastoreClient,
                                                          fallbackLayer,
                                                          mergedAdditionalAxes,
-                                                          userToken)
+                                                          tc)
        _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) =>
          for {
-            _ <- saveBucket(newId, elementClass, bucketPosition, bucketBytes, newVersion, toCache, mergedAdditionalAxes)
+            _ <- saveBucket(newId,
+                            elementClass,
+                            bucketPosition,
+                            bucketBytes,
+                            newVersion,
+                            toTemporaryStore,
+                            mergedAdditionalAxes)
            _ <- Fox.runIf(shouldCreateSegmentIndex)(
              updateSegmentIndex(segmentIndexBuffer,
                                 bucketPosition,
@@ -870,78 +805,22 @@ class VolumeTracingService
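mergeVolumeData above only merges buckets whose mag is present in every source tracing (magsIntersection). The intersection itself is plain set algebra; a sketch with integer triples standing in for Vec3Int:

// Sketch: keep only the mags that occur in all source tracings.
def magsIntersection(magSets: Iterable[Set[(Int, Int, Int)]]): Set[(Int, Int, Int)] =
  magSets.reduceOption(_ intersect _).getOrElse(Set.empty)

// magsIntersection(List(Set((1, 1, 1), (2, 2, 2)), Set((1, 1, 1), (4, 4, 4)))) == Set((1, 1, 1))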
@Inject()( } yield () } _ <- segmentIndexBuffer.flush() + _ = Instant.logSince( + before, + s"Merging buckets from ${tracings.length} volume tracings into new $newId, with createSegmentIndex = $shouldCreateSegmentIndex") } yield mergedVolume.stats(shouldCreateSegmentIndex) } } - def addSegmentIndex(tracingId: String, - tracing: VolumeTracing, - currentVersion: Long, - userToken: Option[String], - dryRun: Boolean): Fox[Option[Int]] = { - var processedBucketCount = 0 - for { - isTemporaryTracing <- isTemporaryTracing(tracingId) - sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - fallbackLayer <- getFallbackLayer(tracingId) - mappingName <- baseMappingName(tracing) - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - currentVersion + 1L, - remoteDatastoreClient, - fallbackLayer, - sourceDataLayer.additionalAxes, - userToken) - _ <- Fox.serialCombined(buckets) { - case (bucketPosition, bucketData) => - processedBucketCount += 1 - updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - bucketData, - Empty, - tracing.elementClass, - mappingName, - editableMappingTracingId(tracing, tracingId)) - } - _ <- Fox.runIf(!dryRun)(segmentIndexBuffer.flush()) - updateGroup = UpdateActionGroup[VolumeTracing]( - tracing.version + 1L, - System.currentTimeMillis(), - None, - List(AddSegmentIndex()), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) - } yield Some(processedBucketCount) - } - - def checkIfSegmentIndexMayBeAdded(tracingId: String, tracing: VolumeTracing, userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Boolean] = - for { - fallbackLayerOpt <- Fox.runIf(tracing.fallbackLayer.isDefined)( - remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - canHaveSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, - fallbackLayerOpt, - userToken) - alreadyHasSegmentIndex = tracing.hasSegmentIndex.getOrElse(false) - } yield canHaveSegmentIndex && !alreadyHasSegmentIndex - - def importVolumeData(tracingId: String, - tracing: VolumeTracing, - zipFile: File, - currentVersion: Int, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[Long] = + def importVolumeData(tracingId: String, tracing: VolumeTracing, zipFile: File, currentVersion: Int)( + implicit mp: MessagesProvider, + tc: TokenContext): Fox[Long] = if (currentVersion != tracing.version) Fox.failure("version.mismatch") else { val magSet = magSetFromZipfile(zipFile) val magsDoMatch = - magSet.isEmpty || magSet == resolveLegacyMagList(tracing.mags).map(vec3IntFromProto).toSet + magSet.isEmpty || magSet == VolumeTracingMags.resolveLegacyMagList(tracing.mags).map(vec3IntFromProto).toSet if (!magsDoMatch) Fox.failure("annotation.volume.magssDoNotMatch") @@ -960,8 +839,8 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(tracingId) - mappingName <- baseMappingName(tracing) + fallbackLayer <- getFallbackLayer(tracingId, tracing) + mappingName <- selectMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, @@ -969,7 +848,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - 
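The getFallbackLayer rework a little further down in this hunk caches per (tracingId, fallbackLayerName, userToken) instead of per tracingId alone, so a cached fallback layer can no longer be served for a changed fallback layer or a differently-authorized token. A minimal sketch of such a composite-key memo (the real code uses the project's cache, not a TrieMap):

import scala.collection.concurrent.TrieMap

// Sketch: memoize fallback-layer lookups per (tracingId, fallbackLayerName, userToken).
final class FallbackLayerMemo[V](load: (String, Option[String]) => V) {
  private val cache = TrieMap.empty[(String, Option[String], Option[String]), V]
  def getOrLoad(tracingId: String, fallbackLayerName: Option[String], userToken: Option[String]): V =
    cache.getOrElseUpdate((tracingId, fallbackLayerName, userToken), load(tracingId, fallbackLayerName))
}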
userToken)) + tc)) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { _ <- saveBucket(volumeLayer, bucketPosition, bucketBytes, tracing.version + 1) @@ -990,54 +869,25 @@ class VolumeTracingService @Inject()( } yield () } _ <- segmentIndexBuffer.flush() - updateGroup = UpdateActionGroup[VolumeTracing]( - tracing.version + 1, - System.currentTimeMillis(), - None, - List(ImportVolumeData(Some(mergedVolume.largestSegmentId.toPositiveLong))), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - _ <- handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken) } yield mergedVolume.largestSegmentId.toPositiveLong } } - def dummyTracing: VolumeTracing = ??? + def getFallbackLayer(tracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): Fox[Option[RemoteFallbackLayer]] = + fallbackLayerCache.getOrLoad((tracingId, tracing.fallbackLayer, tc.userTokenOpt), + t => getFallbackLayerFromWebknossos(t._1, t._2)) - def mergeEditableMappings(tracingsWithIds: List[(VolumeTracing, String)], userToken: Option[String]): Fox[String] = - if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { + private def getFallbackLayerFromWebknossos(tracingId: String, fallbackLayerName: Option[String])( + implicit tc: TokenContext) = + Fox[Option[RemoteFallbackLayer]] { for { - remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => - remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) - remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox - _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - editableMappingIds <- Fox.serialCombined(tracingsWithIds)(tracingWithId => tracingWithId._1.mappingName) - _ <- bool2Fox(editableMappingIds.length == tracingsWithIds.length) ?~> "Not all volume tracings have editable mappings" - newEditableMappingId <- editableMappingService.merge(editableMappingIds, remoteFallbackLayer, userToken) - } yield newEditableMappingId - } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { - Fox.empty - } else { - Fox.failure("Cannot merge tracings with and without editable mappings") + dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) + dataSourceId = dataSource.id + fallbackLayer = dataSource.dataLayers + .find(_.name == fallbackLayerName.getOrElse("")) + .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) + } yield fallbackLayer } - def getFallbackLayer(tracingId: String): Fox[Option[RemoteFallbackLayer]] = - fallbackLayerCache.getOrLoad(tracingId, t => getFallbackLayerFromWebknossos(t)) - - private def getFallbackLayerFromWebknossos(tracingId: String) = Fox[Option[RemoteFallbackLayer]] { - for { - tracing <- find(tracingId) - dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) - dataSourceId = dataSource.id - fallbackLayerName = tracing.fallbackLayer - fallbackLayer = dataSource.dataLayers - .find(_.name == fallbackLayerName.getOrElse("")) - .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) - } yield fallbackLayer - } - } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index d35b3cf7da8..f7cb2a2808f 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -1,13 +1,12 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume -import java.util.Base64 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} -import com.scalableminds.webknossos.datastore.geometry +import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.VolumeUpdateAction -import com.scalableminds.webknossos.tracingstore.tracings.{NamedBoundingBox, UpdateAction, MetadataEntry} +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.{NamedBoundingBox, MetadataEntry} import play.api.libs.json._ trait VolumeUpdateActionHelper { @@ -26,29 +25,34 @@ trait VolumeUpdateActionHelper { } -trait ApplyableVolumeAction extends VolumeUpdateAction +trait VolumeUpdateAction extends LayerUpdateAction + +trait ApplyableVolumeUpdateAction extends VolumeUpdateAction { + def applyOn(tracing: VolumeTracing): VolumeTracing +} + +trait BucketMutatingVolumeUpdateAction extends VolumeUpdateAction case class UpdateBucketVolumeAction(position: Vec3Int, cubeSize: Int, mag: Vec3Int, - base64Data: String, + base64Data: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends VolumeUpdateAction { - lazy val data: Array[Byte] = Base64.getDecoder.decode(base64Data) + info: Option[String] = None) + extends BucketMutatingVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateBucket", actionTimestamp, actionAuthorId, Json.obj()) -} - -object UpdateBucketVolumeAction { - implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] + def withoutBase64Data: UpdateBucketVolumeAction = + this.copy(base64Data = None) } case class UpdateTracingVolumeAction( @@ -57,86 +61,68 @@ case class UpdateTracingVolumeAction( editRotation: Vec3Double, largestSegmentId: Option[Long], zoomLevel: Double, + editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None -) extends VolumeUpdateAction { + info: Option[String] = None +) extends ApplyableVolumeUpdateAction + with 
ProtoGeometryImplicits { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateTracing", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def isViewOnlyChange: Boolean = true -} - -object UpdateTracingVolumeAction { - implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] -} - -case class RevertToVersionVolumeAction(sourceVersion: Long, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("revertToVersion", - actionTimestamp, - actionAuthorId, - Json.obj("sourceVersion" -> sourceVersion)) -} -object RevertToVersionVolumeAction { - implicit val jsonFormat: OFormat[RevertToVersionVolumeAction] = Json.format[RevertToVersionVolumeAction] -} - -case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy( + activeSegmentId = Some(activeSegmentId), + editPosition = editPosition, + editRotation = editRotation, + largestSegmentId = largestSegmentId, + zoomLevel = zoomLevel, + editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) + ) +} + +case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBoundingBox], + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateUserBoundingBoxes", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) } -object UpdateUserBoundingBoxes { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxes] = Json.format[UpdateUserBoundingBoxes] -} - -case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class 
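The new ApplyableVolumeUpdateAction trait is what makes the lazy application of volume actions possible: materializing a layer at some version amounts to folding the applyable actions up to that version over the stored tracing. Schematically, using the traits defined in this file (bucket-mutating actions are persisted separately and not folded):

// Sketch: replay applyable update actions over a tracing, oldest first.
def materialize(base: VolumeTracing, actions: Seq[ApplyableVolumeUpdateAction]): VolumeTracing =
  actions.foldLeft(base)((tracing, action) => action.applyOn(tracing))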
UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int], + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateUserBoundingBoxVisibility", - actionTimestamp, - actionAuthorId, - Json.obj("boundingBoxId" -> boundingBoxId, "newVisibility" -> isVisible)) - override def isViewOnlyChange: Boolean = true + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { - def updateUserBoundingBoxes(): Seq[geometry.NamedBoundingBoxProto] = + def updateUserBoundingBoxes(): Seq[NamedBoundingBoxProto] = tracing.userBoundingBoxes.map { boundingBox => if (boundingBoxId.forall(_ == boundingBox.id)) boundingBox.copy(isVisible = Some(isVisible)) @@ -146,92 +132,59 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) } -} -object UpdateUserBoundingBoxVisibility { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibility] = Json.format[UpdateUserBoundingBoxVisibility] + override def isViewOnlyChange: Boolean = true } -case class RemoveFallbackLayer(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class RemoveFallbackLayerVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("removeFallbackLayer", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.clearFallbackLayer } -object RemoveFallbackLayer { - implicit val jsonFormat: OFormat[RemoveFallbackLayer] = Json.format[RemoveFallbackLayer] -} - -case class ImportVolumeData(largestSegmentId: Option[Long], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class ImportVolumeDataVolumeAction(actionTracingId: String, + largestSegmentId: Option[Long], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId 
= authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("importVolumeTracing", - actionTimestamp, - actionAuthorId, - Json.obj("largestSegmentId" -> largestSegmentId)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(largestSegmentId = largestSegmentId) } -object ImportVolumeData { - implicit val jsonFormat: OFormat[ImportVolumeData] = Json.format[ImportVolumeData] -} - -case class AddSegmentIndex(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +// The current code no longer creates these actions, but they are in the history of some volume annotations. +case class AddSegmentIndexVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("addSegmentIndex", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(hasSegmentIndex = Some(true)) -} -object AddSegmentIndex { - implicit val jsonFormat: OFormat[AddSegmentIndex] = Json.format[AddSegmentIndex] -} - -case class UpdateTdCamera(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction { - - override def addTimestamp(timestamp: Long): VolumeUpdateAction = - this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateTdCamera", actionTimestamp, actionAuthorId, Json.obj()) - - override def isViewOnlyChange: Boolean = true -} - -object UpdateTdCamera { - implicit val jsonFormat: OFormat[UpdateTdCamera] = Json.format[UpdateTdCamera] } case class CreateSegmentVolumeAction(id: Long, @@ -240,20 +193,22 @@ case class CreateSegmentVolumeAction(id: Long, color: Option[com.scalableminds.util.image.Color], groupId: Option[Int], creationTime: Option[Long], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends ApplyableVolumeAction + info: Option[String] = None) + extends ApplyableVolumeUpdateAction with ProtoGeometryImplicits { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = 
this.copy(actionAuthorId = authorId) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("createSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { val newSegment = @@ -271,21 +226,19 @@ case class CreateSegmentVolumeAction(id: Long, } } -object CreateSegmentVolumeAction { - implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = Json.format[CreateSegmentVolumeAction] -} - case class UpdateSegmentVolumeAction(id: Long, anchorPosition: Option[Vec3Int], name: Option[String], color: Option[com.scalableminds.util.image.Color], creationTime: Option[Long], groupId: Option[Int], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends ApplyableVolumeAction + info: Option[String] = None) + extends ApplyableVolumeUpdateAction with ProtoGeometryImplicits with VolumeUpdateActionHelper { @@ -293,9 +246,9 @@ case class UpdateSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("updateSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { def segmentTransform(segment: Segment): Segment = @@ -312,63 +265,54 @@ case class UpdateSegmentVolumeAction(id: Long, } } -object UpdateSegmentVolumeAction { - implicit val jsonFormat: OFormat[UpdateSegmentVolumeAction] = Json.format[UpdateSegmentVolumeAction] -} - case class DeleteSegmentVolumeAction(id: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None) - extends ApplyableVolumeAction { + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("deleteSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withSegments(tracing.segments.filter(_.segmentId != id)) } -object DeleteSegmentVolumeAction { - implicit val jsonFormat: OFormat[DeleteSegmentVolumeAction] = Json.format[DeleteSegmentVolumeAction] -} - case class DeleteSegmentDataVolumeAction(id: Long, + 
actionTracingId: String, actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None) - extends VolumeUpdateAction { + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends BucketMutatingVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("deleteSegmentData", actionTimestamp, actionAuthorId, Json.obj()) -} - -object DeleteSegmentDataVolumeAction { - implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] -} - -case class UpdateMappingNameAction(mappingName: Option[String], - isEditable: Option[Boolean], - isLocked: Option[Boolean], - actionTimestamp: Option[Long], - actionAuthorId: Option[String] = None) - extends ApplyableVolumeAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = - this.copy(actionTimestamp = Some(timestamp)) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("updateMappingName", - actionTimestamp, - actionAuthorId, - Json.obj("mappingName" -> mappingName)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) +} + +case class UpdateMappingNameVolumeAction(mappingName: Option[String], + isEditable: Option[Boolean], + isLocked: Option[Boolean], + actionTracingId: String, + actionTimestamp: Option[Long], + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name if it is locked @@ -378,101 +322,95 @@ case class UpdateMappingNameAction(mappingName: Option[String], mappingIsLocked = Some(isLocked.getOrElse(false))) } -object UpdateMappingNameAction { - implicit val jsonFormat: OFormat[UpdateMappingNameAction] = Json.format[UpdateMappingNameAction] +case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction + with VolumeUpdateActionHelper { + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup)) + + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + 
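Every action now carries its actionTracingId plus a withActionTracingId copy method; presumably this is what lets pending update actions be remapped onto fresh layer ids, for example when an annotation is duplicated. A hedged sketch (assuming the LayerUpdateAction trait declares actionTracingId, as the overrides here suggest; idMap is a hypothetical old-to-new id mapping):

// Sketch: remap update actions onto duplicated layers' new tracing ids.
// Assumes LayerUpdateAction declares actionTracingId and withActionTracingId.
def remapActions(actions: Seq[LayerUpdateAction], idMap: Map[String, String]): Seq[LayerUpdateAction] =
  actions.map(action => idMap.get(action.actionTracingId).fold(action)(action.withActionTracingId))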
this.copy(actionTracingId = newTracingId) } +// Only used to represent legacy update actions from the db where not all fields are set +// This is from a time when volume actions were not applied lazily +// (Before https://github.com/scalableminds/webknossos/pull/7917) case class CompactVolumeUpdateAction(name: String, + actionTracingId: String, actionTimestamp: Option[Long], actionAuthorId: Option[String] = None, value: JsObject) - extends VolumeUpdateAction + extends VolumeUpdateAction { + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) +} object CompactVolumeUpdateAction { implicit object compactVolumeUpdateActionFormat extends Format[CompactVolumeUpdateAction] { override def reads(json: JsValue): JsResult[CompactVolumeUpdateAction] = for { name <- (json \ "name").validate[String] + actionTracingId <- (json \ "value" \ "actionTracingId").validate[String] actionTimestamp <- (json \ "value" \ "actionTimestamp").validateOpt[Long] actionAuthorId <- (json \ "value" \ "actionAuthorId").validateOpt[String] - value <- (json \ "value").validate[JsObject].map(_ - "actionTimestamp") - } yield CompactVolumeUpdateAction(name, actionTimestamp, actionAuthorId, value) + value <- (json \ "value").validate[JsObject].map(_ - "actionTracingId" - "actionTimestamp" - "actionAuthorId") + } yield CompactVolumeUpdateAction(name, actionTracingId, actionTimestamp, actionAuthorId, value) override def writes(o: CompactVolumeUpdateAction): JsValue = Json.obj("name" -> o.name, "value" -> (Json.obj("actionTimestamp" -> o.actionTimestamp) ++ o.value)) } } -case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction - with VolumeUpdateActionHelper { - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup)) - - override def addTimestamp(timestamp: Long): UpdateAction[VolumeTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[VolumeTracing] = - this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[VolumeTracing] = this.copy(info = info) +object UpdateBucketVolumeAction { + implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] +} +object UpdateTracingVolumeAction { + implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] +} +object UpdateUserBoundingBoxesVolumeAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesVolumeAction] = + Json.format[UpdateUserBoundingBoxesVolumeAction] +} +object UpdateUserBoundingBoxVisibilityVolumeAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilityVolumeAction] = + Json.format[UpdateUserBoundingBoxVisibilityVolumeAction] +} +object RemoveFallbackLayerVolumeAction { + implicit val jsonFormat: OFormat[RemoveFallbackLayerVolumeAction] = Json.format[RemoveFallbackLayerVolumeAction] +} +object ImportVolumeDataVolumeAction { + implicit val jsonFormat: 
OFormat[ImportVolumeDataVolumeAction] = Json.format[ImportVolumeDataVolumeAction] +} +object AddSegmentIndexVolumeAction { + implicit val jsonFormat: OFormat[AddSegmentIndexVolumeAction] = Json.format[AddSegmentIndexVolumeAction] +} +object CreateSegmentVolumeAction { + implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = Json.format[CreateSegmentVolumeAction] +} +object UpdateSegmentVolumeAction { + implicit val jsonFormat: OFormat[UpdateSegmentVolumeAction] = Json.format[UpdateSegmentVolumeAction] +} +object DeleteSegmentVolumeAction { + implicit val jsonFormat: OFormat[DeleteSegmentVolumeAction] = Json.format[DeleteSegmentVolumeAction] +} +object DeleteSegmentDataVolumeAction { + implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] +} +object UpdateMappingNameVolumeAction { + implicit val jsonFormat: OFormat[UpdateMappingNameVolumeAction] = Json.format[UpdateMappingNameVolumeAction] } - object UpdateSegmentGroupsVolumeAction { implicit val jsonFormat: OFormat[UpdateSegmentGroupsVolumeAction] = Json.format[UpdateSegmentGroupsVolumeAction] } - -object VolumeUpdateAction { - - implicit object volumeUpdateActionFormat extends Format[VolumeUpdateAction] { - override def reads(json: JsValue): JsResult[VolumeUpdateAction] = - (json \ "name").validate[String].flatMap { - case "updateBucket" => (json \ "value").validate[UpdateBucketVolumeAction] - case "updateTracing" => (json \ "value").validate[UpdateTracingVolumeAction] - case "revertToVersion" => (json \ "value").validate[RevertToVersionVolumeAction] - case "updateUserBoundingBoxes" => (json \ "value").validate[UpdateUserBoundingBoxes] - case "updateUserBoundingBoxVisibility" => (json \ "value").validate[UpdateUserBoundingBoxVisibility] - case "removeFallbackLayer" => (json \ "value").validate[RemoveFallbackLayer] - case "importVolumeTracing" => (json \ "value").validate[ImportVolumeData] - case "updateTdCamera" => (json \ "value").validate[UpdateTdCamera] - case "createSegment" => (json \ "value").validate[CreateSegmentVolumeAction] - case "updateSegment" => (json \ "value").validate[UpdateSegmentVolumeAction] - case "updateSegmentGroups" => (json \ "value").validate[UpdateSegmentGroupsVolumeAction] - case "deleteSegment" => (json \ "value").validate[DeleteSegmentVolumeAction] - case "deleteSegmentData" => (json \ "value").validate[DeleteSegmentDataVolumeAction] - case "updateMappingName" => (json \ "value").validate[UpdateMappingNameAction] - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } - - override def writes(o: VolumeUpdateAction): JsValue = o match { - case s: UpdateBucketVolumeAction => - Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) - case s: UpdateTracingVolumeAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) - case s: RevertToVersionVolumeAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionVolumeAction.jsonFormat)) - case s: UpdateUserBoundingBoxes => - Json.obj("name" -> "updateUserBoundingBoxes", "value" -> Json.toJson(s)(UpdateUserBoundingBoxes.jsonFormat)) - case s: UpdateUserBoundingBoxVisibility => - Json.obj("name" -> "updateUserBoundingBoxVisibility", - "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibility.jsonFormat)) - case s: RemoveFallbackLayer => - Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayer.jsonFormat)) - case s: 
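These formats, like the removed dispatching VolumeUpdateAction format that follows, (de)serialize each action in a { "name": ..., "value": ... } envelope. A toy Play-JSON round trip of that envelope shape (Envelope is a hypothetical stand-in, not a tracingstore type):

import play.api.libs.json._

// Toy envelope mirroring the name/value wire shape of update actions.
case class Envelope(name: String, value: JsObject)
object Envelope {
  implicit val jsonFormat: OFormat[Envelope] = Json.format[Envelope]
}

val json = Json.obj("name" -> "updateBucket", "value" -> Json.obj("actionTracingId" -> "abc123"))
val parsed = json.validate[Envelope] // JsSuccess(Envelope(updateBucket, {"actionTracingId":"abc123"}), ...)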
ImportVolumeData => - Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeData.jsonFormat)) - case s: UpdateTdCamera => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCamera.jsonFormat)) - case s: CreateSegmentVolumeAction => - Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) - case s: UpdateSegmentVolumeAction => - Json.obj("name" -> "updateSegment", "value" -> Json.toJson(s)(UpdateSegmentVolumeAction.jsonFormat)) - case s: DeleteSegmentVolumeAction => - Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) - case s: UpdateSegmentGroupsVolumeAction => - Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) - case s: CompactVolumeUpdateAction => Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) - } - } - -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala index ec8df4f8f59..b1783fe8d9d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala @@ -18,7 +18,7 @@ import scala.concurrent.{ExecutionContext, Future} class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean) extends WKWDataFormatHelper - with VolumeBucketReversionHelper + with ReversionHelper with ByteUtils { def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])], mags: Seq[Vec3Int])( @@ -27,7 +27,7 @@ class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean val header = WKWHeader(1, DataLayer.bucketLength, ChunkType.LZ4, voxelType, numChannels) bucketStream.flatMap { case (bucket, data) => - val skipBucket = if (tracingHasFallbackLayer) isRevertedBucket(data) else isAllZero(data) + val skipBucket = if (tracingHasFallbackLayer) isRevertedElement(data) else isAllZero(data) if (skipBucket) { // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index 40b0c75db6d..be80c1dfb78 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -28,7 +28,7 @@ import scala.concurrent.{ExecutionContext, Future} // Creates data zip from volume tracings class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLayer: Boolean) extends ProtoGeometryImplicits - with VolumeBucketReversionHelper + with ReversionHelper with Zarr3OutputHelper with ByteUtils { @@ -44,7 +44,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay val header = Zarr3ArrayHeader.fromDataLayer(layer, mags.headOption.getOrElse(Vec3Int.ones)) bucketStream.flatMap { case (bucket, data) => - val skipBucket = if (tracingHasFallbackLayer) isAllZero(data) else isRevertedBucket(data) + val skipBucket = if 
(tracingHasFallbackLayer) isAllZero(data) else isRevertedElement(data) if (skipBucket) { // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index d1384d8aa4b..19b7ce3a685 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -1,82 +1,76 @@ -# Routes -# This file defines all application routes (Higher priority routes first) +# Defines tracingstore routes (Higher priority routes first) # ~~~~ # Health endpoint -GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health +GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health + +# Annotations (concerns AnnotationProto, not annotation info as stored in postgres) +POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) +GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) +POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) +GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) +GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) +GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, datasetBoundingBox: Option[String]) +POST /annotation/:annotationId/resetToBase @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.resetToBase(annotationId: String) +POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(toTemporaryStore: Boolean, newAnnotationId: String) # Volume tracings -POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) -POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], tracingId: String, minMag: Option[Int], maxMag: Option[Int]) -POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(token: Option[String], tracingId: String) -GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(token: Option[String], tracingId: String, version: Option[Long]) -GET /volume/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.newestVersion(token: Option[String], tracingId: String) -POST /volume/:tracingId/update 
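The new annotation routes above make the unified history addressable per annotation rather than per tracing, and the token: Option[String] query parameters are gone in favor of TokenContext. A client-side sketch of polling the newest version (Play WSClient; the auth-header name is an assumption for illustration, not taken from this diff):

import play.api.libs.ws.{WSClient, WSResponse}
import scala.concurrent.Future

// Sketch: poll the tracingstore for an annotation's newest version.
def fetchNewestVersion(ws: WSClient, tracingstoreUri: String, annotationId: String, token: String): Future[WSResponse] =
  ws.url(s"$tracingstoreUri/annotation/$annotationId/newestVersion")
    .addHttpHeaders("X-Auth-Token" -> token) // header name is an assumption
    .get()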
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.update(token: Option[String], tracingId: String) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) -POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], tracingId: String) -POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], tracingId: String, fromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) -GET /volume/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateActionLog(token: Option[String], tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], tracingId: String) -POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long) -POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(token: Option[String], tracingId: String) -POST /volume/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean) -GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(token: Option[String], tracingId: String) -GET /volume/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(token: Option[String], tracingId: String, agglomerateId: Long) -POST /volume/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.makeMappingEditable(token: Option[String], tracingId: String) -POST /volume/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphMinCut(token: Option[String], tracingId: String) -POST /volume/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphNeighbors(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(token: Option[String], tracingId: String) -POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple(token: 
Option[String])
-POST          /volume/mergedFromIds                                     @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(token: Option[String], persist: Boolean)
-POST          /volume/mergedFromContents                                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(token: Option[String], persist: Boolean)
+POST          /volume/save                                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(newTracingId: Option[String])
+POST          /volume/:tracingId/initialData                            @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int])
+POST          /volume/:tracingId/initialDataMultiple                    @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String)
+GET           /volume/:tracingId                                        @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, annotationId: String, version: Option[Long])
+GET           /volume/:tracingId/allDataZip                             @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, annotationId: Option[String], version: Option[Long], volumeDataZipFormat: String, voxelSize: Option[String], voxelSizeUnit: Option[String])
+POST          /volume/:tracingId/data                                   @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String, annotationId: String)
+POST          /volume/:tracingId/adHocMesh                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String)
+POST          /volume/:tracingId/fullMesh.stl                           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String)
+POST          /volume/:tracingId/duplicate                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String])
+POST          /volume/:tracingId/segmentIndex/:segmentId                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(tracingId: String, segmentId: Long)
+POST          /volume/:tracingId/importVolumeData                       @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(tracingId: String)
+GET           /volume/:tracingId/findData                               @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(tracingId: String)
+POST          /volume/:tracingId/segmentStatistics/volume               @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(tracingId: String)
+POST          /volume/:tracingId/segmentStatistics/boundingBox          @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(tracingId: String)
+POST          /volume/getMultiple                                       @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple
+POST          /volume/mergedFromContents                                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents

# Editable Mappings
-POST          /mapping/:tracingId/update                                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateEditableMapping(token: Option[String], tracingId: String)
-GET           /mapping/:tracingId/updateActionLog                       @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingUpdateActionLog(token: Option[String], tracingId: String)
-GET           /mapping/:tracingId/info                                  @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long])
-GET           /mapping/:tracingId/segmentsForAgglomerate                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], tracingId: String, agglomerateId: Long)
-POST          /mapping/:tracingId/agglomeratesForSegments               @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String)
+GET           /mapping/:tracingId/info                                  @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long])
+GET           /mapping/:tracingId/segmentsForAgglomerate                @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long)
+POST          /mapping/:tracingId/agglomeratesForSegments               @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long])
+POST          /mapping/:tracingId/agglomerateGraphMinCut                @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String)
+POST          /mapping/:tracingId/agglomerateGraphNeighbors             @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String)
+GET           /mapping/:tracingId/agglomerateSkeleton/:agglomerateId    @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long)

# Zarr endpoints for volume annotations
# Zarr version 2
-GET           /volume/zarr/json/:tracingId                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int = 2)
-GET           /volume/zarr/json/:tracingId/:mag                         @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2)
-GET           /volume/zarr/:tracingId                                   @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 2)
-GET           /volume/zarr/:tracingId/                                  @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 2)
-GET           /volume/zarr/:tracingId/.zgroup                           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(token: Option[String], tracingId: String)
-GET           /volume/zarr/:tracingId/.zattrs                           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(token: Option[String], tracingId: String)
-GET           /volume/zarr/:tracingId/zarrSource                        @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], tracingId: String, tracingName: Option[String], zarrVersion: Int = 2)
-GET           /volume/zarr/:tracingId/:mag                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2)
-GET           /volume/zarr/:tracingId/:mag/                             @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2)
-GET           /volume/zarr/:tracingId/:mag/.zarray                      @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(token: Option[String], tracingId: String, mag: String)
-GET           /volume/zarr/:tracingId/:mag/:coordinates                 @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String)
+GET           /volume/zarr/json/:tracingId                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int = 2)
+GET           /volume/zarr/json/:tracingId/:mag                         @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int = 2)
+GET           /volume/zarr/:tracingId                                   @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 2)
+GET           /volume/zarr/:tracingId/                                  @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 2)
+GET           /volume/zarr/:tracingId/.zgroup                           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(tracingId: String)
+GET           /volume/zarr/:tracingId/.zattrs                           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(tracingId: String)
+GET           /volume/zarr/:tracingId/zarrSource                        @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int = 2)
+GET           /volume/zarr/:tracingId/:mag                              @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 2)
+GET           /volume/zarr/:tracingId/:mag/                             @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 2)
+GET           /volume/zarr/:tracingId/:mag/.zarray                      @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(tracingId: String, mag: String)
+GET           /volume/zarr/:tracingId/:mag/:coordinates                 @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(tracingId: String, mag: String, coordinates: String)

# Zarr version 3
-GET           /volume/zarr3_experimental/json/:tracingId                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/json/:tracingId/:mag           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/:tracingId                     @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/:tracingId/                    @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/:tracingId/zarrSource          @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], tracingId: String, tracingName: Option[String], zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/:tracingId/zarr.json           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(token: Option[String], tracingId: String)
-GET           /volume/zarr3_experimental/:tracingId/:mag                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/:tracingId/:mag/               @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3)
-GET           /volume/zarr3_experimental/:tracingId/:mag/zarr.json      @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(token: Option[String], tracingId: String, mag: String)
-GET           /volume/zarr3_experimental/:tracingId/:mag/:coordinates   @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String)
+GET           /volume/zarr3_experimental/json/:tracingId                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/json/:tracingId/:mag           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/:tracingId                     @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/:tracingId/                    @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/:tracingId/zarrSource          @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/:tracingId/zarr.json           @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(tracingId: String)
+GET           /volume/zarr3_experimental/:tracingId/:mag                @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/:tracingId/:mag/               @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 3)
+GET           /volume/zarr3_experimental/:tracingId/:mag/zarr.json      @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(tracingId: String, mag: String)
+GET           /volume/zarr3_experimental/:tracingId/:mag/:coordinates   @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(tracingId: String, mag: String, coordinates: String)

# Skeleton tracings
-POST          /skeleton/save                                            @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save(token: Option[String])
-POST          /skeleton/saveMultiple                                    @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple(token: Option[String])
-
-POST          /skeleton/mergedFromContents                              @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(token: Option[String], persist: Boolean)
-POST          /skeleton/mergedFromIds                                   @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(token: Option[String], persist: Boolean)
-
-GET           /skeleton/:tracingId                                      @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], tracingId: String, version: Option[Long])
-GET           /skeleton/:tracingId/newestVersion                        @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.newestVersion(token: Option[String], tracingId: String)
-GET           /skeleton/:tracingId/updateActionStatistics               @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.updateActionStatistics(token: Option[String], tracingId: String)
-GET           /skeleton/:tracingId/updateActionLog                      @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.updateActionLog(token: Option[String], tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long])
-POST          /skeleton/getMultiple                                     @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String])
-
-POST          /skeleton/:tracingId/update                               @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.update(token: Option[String], tracingId: String)
-POST          /skeleton/:tracingId/duplicate                            @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String])
+POST          /skeleton/save                                            @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save()
+POST          /skeleton/saveMultiple                                    @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple()
+POST          /skeleton/mergedFromContents                              @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents
+GET           /skeleton/:tracingId                                      @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, annotationId: String, version: Option[Long])
+POST          /skeleton/:tracingId/duplicate                            @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String])
+POST          /skeleton/getMultiple                                     @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple
-          {stats.treeCount} {formatLabel(pluralize("Tree", stats.treeCount))}
+          {skeletonStats.treeCount} {formatLabel(pluralize("Tree", skeletonStats.treeCount))}
@@ -260,7 +268,7 @@ export function AnnotationStats({
           />
-          {stats.segmentCount} {formatLabel(pluralize("Segment", stats.segmentCount))}
+          {totalSegmentCount} {formatLabel(pluralize("Segment", totalSegmentCount))}