diff --git a/app/Startup.scala b/app/Startup.scala index 5903094610..23f3cf49ff 100755 --- a/app/Startup.scala +++ b/app/Startup.scala @@ -98,7 +98,7 @@ class Startup @Inject()(actorSystem: ActorSystem, } private def ensurePostgresSchema(): Unit = { - logger.info("Checking database schema…") + logger.info("Checking database schema...") val errorMessageBuilder = mutable.ListBuffer[String]() val capturingProcessLogger = @@ -115,7 +115,7 @@ class Startup @Inject()(actorSystem: ActorSystem, } private def ensurePostgresDatabase(): Unit = { - logger.info(s"Ensuring Postgres database…") + logger.info(s"Ensuring Postgres database...") val processLogger = ProcessLogger((o: String) => logger.info(s"dbtool: $o"), (e: String) => logger.error(s"dbtool: $e")) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 682e6f3965..d3d1147b0d 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -132,12 +132,12 @@ class AnnotationIOController @Inject()( volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) tracingStoreClient <- tracingStoreService.clientFor(dataset) newAnnotationId = ObjectId.generate - mergedVolumeLayers <- mergeAndSaveVolumeLayers(newAnnotationId, - volumeLayersGrouped, - tracingStoreClient, - parsedFiles.otherFiles, - usableDataSource, - dataset._id) + (mergedVolumeLayers, earliestAccessibleVersion) <- mergeAndSaveVolumeLayers(newAnnotationId, + volumeLayersGrouped, + tracingStoreClient, + parsedFiles.otherFiles, + usableDataSource, + dataset._id) mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient) annotation <- annotationService.createFrom(request.identity, dataset, @@ -145,12 +145,12 @@ class AnnotationIOController @Inject()( AnnotationType.Explorational, name, description, - ObjectId.generate) + newAnnotationId) annotationProto = AnnotationProto( description = annotation.description, version = 0L, annotationLayers = annotation.annotationLayers.map(_.toProto), - earliestAccessibleVersion = 0L + earliestAccessibleVersion = earliestAccessibleVersion ) _ <- tracingStoreClient.saveAnnotationProto(annotation._id, annotationProto) _ <- annotationDAO.insertOne(annotation) @@ -163,37 +163,64 @@ class AnnotationIOController @Inject()( } } + private def layersHaveDuplicateFallbackLayer(annotationLayers: Seq[UploadedVolumeLayer]) = { + val withFallbackLayer = annotationLayers.filter(_.tracing.fallbackLayer.isDefined) + withFallbackLayer.length > withFallbackLayer.distinctBy(_.tracing.fallbackLayer).length + } + private def mergeAndSaveVolumeLayers(newAnnotationId: ObjectId, volumeLayersGrouped: Seq[List[UploadedVolumeLayer]], client: WKRemoteTracingStoreClient, otherFiles: Map[String, File], dataSource: UsableDataSource, - datasetId: ObjectId): Fox[List[AnnotationLayer]] = + datasetId: ObjectId): Fox[(List[AnnotationLayer], Long)] = if (volumeLayersGrouped.isEmpty) - Fox.successful(List()) + Fox.successful(List(), 0L) + else if (volumeLayersGrouped.exists(layersHaveDuplicateFallbackLayer(_))) + Fox.failure("Cannot save annotation with multiple volume layers that have the same fallback segmentation layer.") else if (volumeLayersGrouped.length > 1 && volumeLayersGrouped.exists(_.length > 1)) Fox.failure("Cannot merge multiple annotations that each have multiple volume layers.") - else if (volumeLayersGrouped.length == 1) { // Just one annotation was uploaded, keep its layers separate - 
Fox.serialCombined(volumeLayersGrouped.toList.flatten.zipWithIndex) { volumeLayerWithIndex => - val uploadedVolumeLayer = volumeLayerWithIndex._1 - val idx = volumeLayerWithIndex._2 - val newTracingId = TracingId.generate - for { - _ <- client.saveVolumeTracing(newAnnotationId, - newTracingId, - uploadedVolumeLayer.tracing, - uploadedVolumeLayer.getDataZipFrom(otherFiles), - dataSource = dataSource, - datasetId = datasetId) - } yield - AnnotationLayer( - newTracingId, - AnnotationLayerType.Volume, - uploadedVolumeLayer.name.getOrElse(AnnotationLayer.defaultVolumeLayerName + idx.toString), - AnnotationLayerStatistics.unknown - ) - } - } else { // Multiple annotations with volume layers (but at most one each) was uploaded merge those volume layers into one + else if (volumeLayersGrouped.length > 1 && volumeLayersGrouped.exists( + _.exists(_.editedMappingEdgesLocation.isDefined))) { + Fox.failure("Cannot merge multiple annotations with editable mapping (proofreading) edges.") + } else if (volumeLayersGrouped.length == 1) { // Just one annotation was uploaded, keep its layers separate + var layerUpdatesStartVersionMutable = 1L + for { + annotationLayers <- Fox.serialCombined(volumeLayersGrouped.toList.flatten.zipWithIndex) { + volumeLayerWithIndex => + val uploadedVolumeLayer = volumeLayerWithIndex._1 + val idx = volumeLayerWithIndex._2 + val newTracingId = TracingId.generate + for { + numberOfSavedVersions <- client.saveEditableMappingIfPresent( + newAnnotationId, + newTracingId, + uploadedVolumeLayer.getEditableMappingEdgesZipFrom(otherFiles), + uploadedVolumeLayer.editedMappingBaseMappingName, + startVersion = layerUpdatesStartVersionMutable + ) + // The next layer’s update actions then need to start after this one + _ = layerUpdatesStartVersionMutable = layerUpdatesStartVersionMutable + numberOfSavedVersions + mappingName = if (uploadedVolumeLayer.editedMappingEdgesLocation.isDefined) Some(newTracingId) + else uploadedVolumeLayer.tracing.mappingName + _ <- client.saveVolumeTracing( + newAnnotationId, + newTracingId, + uploadedVolumeLayer.tracing.copy(mappingName = mappingName), + uploadedVolumeLayer.getDataZipFrom(otherFiles), + dataSource = dataSource, + datasetId = datasetId + ) + } yield + AnnotationLayer( + newTracingId, + AnnotationLayerType.Volume, + uploadedVolumeLayer.name.getOrElse(AnnotationLayer.defaultVolumeLayerName + idx.toString), + AnnotationLayerStatistics.unknown + ) + } + } yield (annotationLayers, layerUpdatesStartVersionMutable) + } else { // Multiple annotations with volume layers (but at most one each) were uploaded, they have no editable mappings. 
Merge those volume layers into one val uploadedVolumeLayersFlat = volumeLayersGrouped.toList.flatten val newTracingId = TracingId.generate for { @@ -206,13 +233,14 @@ class AnnotationIOController @Inject()( uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)) ) } yield - List( - AnnotationLayer( - newTracingId, - AnnotationLayerType.Volume, - AnnotationLayer.defaultVolumeLayerName, - AnnotationLayerStatistics.unknown - )) + (List( + AnnotationLayer( + newTracingId, + AnnotationLayerType.Volume, + AnnotationLayer.defaultVolumeLayerName, + AnnotationLayerStatistics.unknown + )), + 0L) } private def mergeAndSaveSkeletonLayers(skeletonTracings: List[SkeletonTracing], diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index 966f2b9c8e..b85ce47cf4 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -21,9 +21,17 @@ import play.api.i18n.MessagesProvider import scala.concurrent.{ExecutionContext, Future} -case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, name: Option[String]) { +case class UploadedVolumeLayer(tracing: VolumeTracing, + dataZipLocation: String, + name: Option[String], + editedMappingEdgesLocation: Option[String], + editedMappingBaseMappingName: Option[String]) { def getDataZipFrom(otherFiles: Map[String, File]): Option[File] = otherFiles.get(dataZipLocation) + + def getEditableMappingEdgesZipFrom(otherFiles: Map[String, File]): Option[File] = + editedMappingEdgesLocation.flatMap(otherFiles.get) + } case class SharedParsingParameters(useZipName: Boolean, diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index d8b0c9ee15..6292538848 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -249,6 +249,23 @@ class WKRemoteTracingStoreClient( } yield () } + def saveEditableMappingIfPresent(annotationId: ObjectId, + newTracingId: String, + editedMappingEdgesZip: Option[File], + editedMappingBaseMappingName: Option[String], + startVersion: Long): Fox[Long] = + (editedMappingEdgesZip, editedMappingBaseMappingName) match { + case (Some(zipfile), Some(baseMappingName)) => + rpc(s"${tracingStore.url}/tracings/mapping/$newTracingId/save").withLongTimeout + .addQueryParam("token", RpcTokenHolder.webknossosToken) + .addQueryParam("annotationId", annotationId) + .addQueryParam("baseMappingName", baseMappingName) + .addQueryParam("startVersion", startVersion) + .postFileWithJsonResponse[Long](zipfile) + case (None, None) => Fox.successful(0L) + case _ => Fox.failure("annotation.upload.editableMappingIncompleteInformation") + } + def getVolumeTracing(annotationId: ObjectId, annotationLayer: AnnotationLayer, version: Option[Long], diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index 1a902d61bb..cd7ef69f22 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -91,10 +91,13 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) segmentGroups = v.segmentGroups, hasSegmentIndex = None, // Note: this property may be adapted later in adaptPropertiesToFallbackLayer editPositionAdditionalCoordinates = nmlParams.editPositionAdditionalCoordinates, - additionalAxes = nmlParams.additionalAxisProtos + additionalAxes = nmlParams.additionalAxisProtos, + hasEditableMapping = if 
(v.editedMappingEdgesLocation.isDefined) Some(true) else None ), basePath.getOrElse("") + v.dataZipPath, v.name, + v.editedMappingEdgesLocation.map(location => basePath.getOrElse("") + location), + v.editedMappingBaseMappingName ) } skeletonTracing: SkeletonTracing = SkeletonTracing( @@ -220,7 +223,9 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) getSingleAttributeOpt(node, "name"), parseVolumeSegmentMetadata(node \ "segments" \ "segment"), getSingleAttributeOpt(node, "largestSegmentId").flatMap(_.toLongOpt), - extractSegmentGroups(node \ "groups").getOrElse(List()) + extractSegmentGroups(node \ "groups").getOrElse(List()), + getSingleAttributeOpt(node, "editedMappingEdgesLocation"), + getSingleAttributeOpt(node, "editedMappingBaseMappingName") ) } ) diff --git a/app/models/annotation/nml/NmlVolumeTag.scala b/app/models/annotation/nml/NmlVolumeTag.scala index 95939f3538..5ed6212376 100644 --- a/app/models/annotation/nml/NmlVolumeTag.scala +++ b/app/models/annotation/nml/NmlVolumeTag.scala @@ -9,4 +9,6 @@ case class NmlVolumeTag(dataZipPath: String, name: Option[String], segments: Seq[Segment], largestSegmentId: Option[Long], - segmentGroups: Seq[SegmentGroup]) {} + segmentGroups: Seq[SegmentGroup], + editedMappingEdgesLocation: Option[String], + editedMappingBaseMappingName: Option[String]) diff --git a/conf/application.conf b/conf/application.conf index 9615f3c3da..e62609240d 100644 --- a/conf/application.conf +++ b/conf/application.conf @@ -200,6 +200,7 @@ tracingstore { address = "localhost" port = 6379 } + cache.chunkCacheMaxSizeBytes = 20000000 # 20 MB } # Serve image data. Only active if the corresponding play module is enabled diff --git a/conf/messages b/conf/messages index d5b1f7e780..1c4f8b02ec 100644 --- a/conf/messages +++ b/conf/messages @@ -267,6 +267,7 @@ annotation.idForTracing.failed=Could not find the annotation id for this tracing annotation.editableMapping.getAgglomerateGraph.failed=Could not look up an agglomerate graph for requested agglomerate. annotation.editableMapping.getAgglomerateIdsForSegments.failed=Could not look up agglomerate ids for requested segments. annotation.duplicate.failed=Failed to duplicate annotation +annotation.upload.editableMappingIncompleteInformation=Could not store editable mapping, either file or baseMappingName is missing. mesh.file.listChunks.failed=Failed to load chunk list for segment {0} from mesh file “{1}” mesh.file.loadChunk.failed=Failed to load mesh chunk for segment diff --git a/frontend/javascripts/viewer/view/version_entry.tsx b/frontend/javascripts/viewer/view/version_entry.tsx index ad2f6b6929..b98bdd2e7c 100644 --- a/frontend/javascripts/viewer/view/version_entry.tsx +++ b/frontend/javascripts/viewer/view/version_entry.tsx @@ -167,7 +167,7 @@ const descriptionFns: Record< ? `at position ${action.value.segmentPosition1}` : (action.value.segmentId1 ?? "unknown"); const segment2Description = - action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; + action.value.segmentPosition2 ?? action.value.segmentId2 ?? "unknown"; const description = `Split agglomerate ${action.value.agglomerateId} by separating the segments ${segment1Description} and ${segment2Description}.`; return { description, @@ -180,7 +180,7 @@ const descriptionFns: Record< ? `at position ${action.value.segmentPosition1}` : (action.value.segmentId1 ?? "unknown"); const segment2Description = - action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; + action.value.segmentPosition2 ?? action.value.segmentId2 ?? 
"unknown"; const description = `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments ${segment1Description} and ${segment2Description}.`; return { description, diff --git a/unreleased_changes/8969.md b/unreleased_changes/8969.md new file mode 100644 index 0000000000..4b0fb91b52 --- /dev/null +++ b/unreleased_changes/8969.md @@ -0,0 +1,2 @@ +### Added +- Editable mapping (aka proofreading) annotations can now be downloaded as zipfile and re-uploaded. diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala index f5630730bb..fe35ac8c7c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala @@ -61,7 +61,7 @@ class DataStoreModule extends AbstractModule { bind(classOf[Hdf5ConnectomeFileService]).asEagerSingleton() bind(classOf[NeuroglancerPrecomputedMeshFileService]).asEagerSingleton() bind(classOf[RemoteSourceDescriptorService]).asEagerSingleton() - bind(classOf[ChunkCacheService]).asEagerSingleton() + bind(classOf[DSChunkCacheService]).asEagerSingleton() bind(classOf[DatasetCache]).asEagerSingleton() } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ArrayDataType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ArrayDataType.scala index 9ad4109315..35622842b3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ArrayDataType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ArrayDataType.scala @@ -4,62 +4,66 @@ import com.scalableminds.util.enumeration.ExtendedEnumeration object ArrayDataType extends ExtendedEnumeration { type ArrayDataType = Value - val f8, f4, i8, u8, i4, u4, i2, u2, i1, u1 = Value + val f8, f4, i8, u8, i4, u4, i2, u2, i1, u1, bool = Value def bytesPerElement(dataType: ArrayDataType): Int = dataType match { - case ArrayDataType.f8 => 8 - case ArrayDataType.f4 => 4 - case ArrayDataType.i8 => 8 - case ArrayDataType.u8 => 8 - case ArrayDataType.i4 => 4 - case ArrayDataType.u4 => 4 - case ArrayDataType.i2 => 2 - case ArrayDataType.u2 => 2 - case ArrayDataType.i1 => 1 - case ArrayDataType.u1 => 1 + case ArrayDataType.f8 => 8 + case ArrayDataType.f4 => 4 + case ArrayDataType.i8 => 8 + case ArrayDataType.u8 => 8 + case ArrayDataType.i4 => 4 + case ArrayDataType.u4 => 4 + case ArrayDataType.i2 => 2 + case ArrayDataType.u2 => 2 + case ArrayDataType.i1 => 1 + case ArrayDataType.u1 => 1 + case ArrayDataType.bool => 1 } def maxValue(dataType: ArrayDataType): Number = dataType match { - case ArrayDataType.f8 => Double.MaxValue - case ArrayDataType.f4 => Float.MaxValue - case ArrayDataType.i8 => Long.MaxValue - case ArrayDataType.u8 => Long.MaxValue // Max value for primitive datatypes - case ArrayDataType.i4 => Int.MaxValue - case ArrayDataType.u4 => Math.pow(2, 4 * 8).toLong - 1 - case ArrayDataType.i2 => Char.MaxValue - case ArrayDataType.u2 => Math.pow(2, 2 * 8).toLong - 1 - case ArrayDataType.i1 => Byte.MaxValue - case ArrayDataType.u1 => Math.pow(2, 1 * 8).toLong - 1 + case ArrayDataType.f8 => Double.MaxValue + case ArrayDataType.f4 => Float.MaxValue + case ArrayDataType.i8 => Long.MaxValue + case ArrayDataType.u8 => Long.MaxValue // Max value for primitive datatypes + case ArrayDataType.i4 => Int.MaxValue + case 
ArrayDataType.u4 => Math.pow(2, 4 * 8).toLong - 1 + case ArrayDataType.i2 => Char.MaxValue + case ArrayDataType.u2 => Math.pow(2, 2 * 8).toLong - 1 + case ArrayDataType.i1 => Byte.MaxValue + case ArrayDataType.u1 => Math.pow(2, 1 * 8).toLong - 1 + case ArrayDataType.bool => 1 } def minValue(dataType: ArrayDataType): Number = dataType match { - case ArrayDataType.f8 => Double.MinValue - case ArrayDataType.f4 => Float.MinValue - case ArrayDataType.i8 => Long.MinValue - case ArrayDataType.u8 => 0 - case ArrayDataType.i4 => Int.MinValue - case ArrayDataType.u4 => 0 - case ArrayDataType.i2 => Char.MinValue - case ArrayDataType.u2 => 0 - case ArrayDataType.i1 => Byte.MinValue - case ArrayDataType.u1 => 0 + case ArrayDataType.f8 => Double.MinValue + case ArrayDataType.f4 => Float.MinValue + case ArrayDataType.i8 => Long.MinValue + case ArrayDataType.u8 => 0 + case ArrayDataType.i4 => Int.MinValue + case ArrayDataType.u4 => 0 + case ArrayDataType.i2 => Char.MinValue + case ArrayDataType.u2 => 0 + case ArrayDataType.i1 => Byte.MinValue + case ArrayDataType.u1 => 0 + case ArrayDataType.bool => 0 } def toWKWId(dataType: ArrayDataType): Int = dataType match { - case ArrayDataType.u1 => 1 - case ArrayDataType.u2 => 2 - case ArrayDataType.u4 => 3 - case ArrayDataType.u8 => 4 - case ArrayDataType.f4 => 5 - case ArrayDataType.f8 => 6 - case ArrayDataType.i1 => 7 - case ArrayDataType.i2 => 8 - case ArrayDataType.i4 => 9 - case ArrayDataType.i8 => 10 + case ArrayDataType.u1 => 1 + case ArrayDataType.u2 => 2 + case ArrayDataType.u4 => 3 + case ArrayDataType.u8 => 4 + case ArrayDataType.f4 => 5 + case ArrayDataType.f8 => 6 + case ArrayDataType.i1 => 7 + case ArrayDataType.i2 => 8 + case ArrayDataType.i4 => 9 + case ArrayDataType.i8 => 10 + case ArrayDataType.bool => ??? } def fromWKWTypeId(wkwVoxelTypeId: Int): ArrayDataType.Value = diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala index 660fa3d8eb..90b8cddbd1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkTyper.scala @@ -20,6 +20,7 @@ object ChunkTyper { case ArrayDataType.i8 | ArrayDataType.u8 => new LongChunkTyper(header) case ArrayDataType.f4 => new FloatChunkTyper(header) case ArrayDataType.f8 => new DoubleChunkTyper(header) + case ArrayDataType.bool => new BoolChunkTyper(header) } } @@ -37,7 +38,10 @@ abstract class ChunkTyper extends FoxImplicits { fillValueChunkCache.getOrLoad(chunkShape.mkString(","), _ => createFromFillValue(chunkShape).toFox) protected def createFromFillValue(chunkShape: Array[Int]): Box[MultiArray] = - MultiArrayUtils.createFilledArray(ma2DataType, chunkShapeOrdered(chunkShape), header.fillValueNumber) + MultiArrayUtils.createFilledArray(ma2DataType, + chunkShapeOrdered(chunkShape), + header.fillValueNumber, + header.fillValueBoolean) // Chunk shape in header is in C-Order (XYZ), but data may be in F-Order (ZYX), so the chunk shape // associated with the array needs to be adjusted. 
@@ -128,6 +132,20 @@ class FloatChunkTyper(val header: DatasetHeader) extends ChunkTyper { }.get) } +class BoolChunkTyper(val header: DatasetHeader) extends ChunkTyper { + + val ma2DataType: MADataType = MADataType.BOOLEAN + + def wrapAndType(bytes: Array[Byte], chunkShape: Array[Int]): Box[MultiArray] = tryo { + val typedStorage = new Array[Boolean](chunkShape.product) + bytes.zipWithIndex.foreach { + case (b, i) => + typedStorage(i) = b != 0 + } + MultiArray.factory(ma2DataType, chunkShapeOrdered(chunkShape), typedStorage) + } +} + // In no-partial-copy shortcut, the MultiArray shape is never used, so it is just set to flat. // type is always BYTE class ShortcutChunkTyper(val header: DatasetHeader) extends ChunkTyper { @@ -140,6 +158,6 @@ class ShortcutChunkTyper(val header: DatasetHeader) extends ChunkTyper { override protected def createFromFillValue(chunkShape: Array[Int]): Box[MultiArray] = { val flatShape = Array(chunkShape.product * header.bytesPerElement) - MultiArrayUtils.createFilledArray(ma2DataType, flatShape, header.fillValueNumber) + MultiArrayUtils.createFilledArray(ma2DataType, flatShape, header.fillValueNumber, header.fillValueBoolean) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala index 3935b42ea6..20a41d4af8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala @@ -38,6 +38,12 @@ trait DatasetHeader { case Left(s) => parseFillValueFromString(s) } + lazy val fillValueBoolean: Boolean = + fill_value match { + case Left("true") => true + case _ => false + } + def boundingBox(axisOrder: AxisOrder): Option[BoundingBox] = datasetShape.flatMap { shape => if (Math.max(Math.max(axisOrder.x, axisOrder.y), axisOrder.zWithFallback) >= rank && axisOrder.hasZAxis) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala index f86af6b6c4..b290ee7380 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala @@ -19,6 +19,7 @@ object MultiArrayUtils extends LazyLogging { case ArrayDataType.i8 | ArrayDataType.u8 => new Array[Long](length) case ArrayDataType.f4 => new Array[Float](length) case ArrayDataType.f8 => new Array[Double](length) + case ArrayDataType.bool => new Array[Boolean](length) } } @@ -28,17 +29,21 @@ object MultiArrayUtils extends LazyLogging { MultiArray.factory(MADataType.getType(aClass.getComponentType, false), shape, storage) } - def createFilledArray(dataType: MADataType, shape: Array[Int], fill: Number): Box[MultiArray] = { + def createFilledArray(dataType: MADataType, + shape: Array[Int], + fillNum: Number, + fillBool: Boolean): Box[MultiArray] = { val array = MultiArray.factory(dataType, shape) val iter = array.getIndexIterator tryo { - if (fill != null) { - if (MADataType.DOUBLE == dataType) while ({ iter.hasNext }) iter.setDoubleNext(fill.doubleValue) - else if (MADataType.FLOAT == dataType) while ({ iter.hasNext }) iter.setFloatNext(fill.floatValue) - else if (MADataType.LONG == dataType) while ({ iter.hasNext }) 
iter.setLongNext(fill.longValue) - else if (MADataType.INT == dataType) while ({ iter.hasNext }) iter.setIntNext(fill.intValue) - else if (MADataType.SHORT == dataType) while ({ iter.hasNext }) iter.setShortNext(fill.shortValue) - else if (MADataType.BYTE == dataType) while ({ iter.hasNext }) iter.setByteNext(fill.byteValue) + if (fillNum != null) { + if (MADataType.DOUBLE == dataType) while ({ iter.hasNext }) iter.setDoubleNext(fillNum.doubleValue) + else if (MADataType.FLOAT == dataType) while ({ iter.hasNext }) iter.setFloatNext(fillNum.floatValue) + else if (MADataType.LONG == dataType) while ({ iter.hasNext }) iter.setLongNext(fillNum.longValue) + else if (MADataType.INT == dataType) while ({ iter.hasNext }) iter.setIntNext(fillNum.intValue) + else if (MADataType.SHORT == dataType) while ({ iter.hasNext }) iter.setShortNext(fillNum.shortValue) + else if (MADataType.BYTE == dataType) while ({ iter.hasNext }) iter.setByteNext(fillNum.byteValue) + else if (MADataType.BOOLEAN == dataType) while ({ iter.hasNext }) iter.setBooleanNext(fillBool) else throw new IllegalStateException } array diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ArrayHeader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ArrayHeader.scala index c9bc3019e9..cb24d6af1d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ArrayHeader.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ArrayHeader.scala @@ -17,7 +17,7 @@ import com.scalableminds.webknossos.datastore.helpers.JsonImplicits import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} import com.scalableminds.util.tools.Box.tryo import com.scalableminds.util.tools.{Box, Full} -import play.api.libs.json.{Format, JsArray, JsObject, JsResult, JsString, JsSuccess, JsValue, Json, OFormat} +import play.api.libs.json.{Format, JsArray, JsError, JsObject, JsResult, JsString, JsSuccess, JsValue, Json, OFormat} import java.nio.ByteOrder @@ -43,7 +43,11 @@ case class Zarr3ArrayHeader( override lazy val order: ArrayOrder = getOrder - override lazy val byteOrder: ByteOrder = ByteOrder.LITTLE_ENDIAN + override lazy val byteOrder: ByteOrder = if (codecs.exists { + case BytesCodecConfiguration(endian) if endian.contains("big") => true + case _ => false + }) ByteOrder.BIG_ENDIAN + else ByteOrder.LITTLE_ENDIAN private def zarr3DataType: Zarr3DataType = Zarr3DataType.fromString(data_type.left.getOrElse("extension")).getOrElse(raw) @@ -171,7 +175,18 @@ object Zarr3ArrayHeader extends JsonImplicits { data_type <- (json \ "data_type").validate[String] chunk_grid <- (json \ "chunk_grid").validate[ChunkGridSpecification] chunk_key_encoding <- (json \ "chunk_key_encoding").validate[ChunkKeyEncoding] - fill_value <- (json \ "fill_value").validate[Either[String, Number]] + fill_value_raw = json \ "fill_value" + fill_value <- (fill_value_raw.validate[String], + fill_value_raw.validate[Number], + fill_value_raw.validate[Boolean]) match { + case (asStr: JsSuccess[String], _, _) => + asStr.flatMap(value => JsSuccess[Either[String, Number]](Left(value))) + case (_, asNum: JsSuccess[Number], _) => + asNum.flatMap(value => JsSuccess[Either[String, Number]](Right(value))) + case (_, _, asBool: JsSuccess[Boolean]) => + asBool.flatMap(value => JsSuccess[Either[String, Number]](Left(value.toString))) + case _ => JsError("Could not parse fill_value as string, number or boolean 
value.") + } attributes = (json \ "attributes").validate[JsObject].asOpt codecsJsValue <- (json \ "codecs").validate[JsValue] codecs = readCodecs(codecsJsValue) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3DataType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3DataType.scala index 065214abb6..0aab5470d6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3DataType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3DataType.scala @@ -24,7 +24,7 @@ object Zarr3DataType extends ExtendedEnumeration { def toArrayDataType(dataType: Zarr3DataType): ArrayDataType = dataType match { - case Zarr3DataType.bool => ??? + case Zarr3DataType.bool => ArrayDataType.bool case Zarr3DataType.int8 => ArrayDataType.i1 case Zarr3DataType.int16 => ArrayDataType.i2 case Zarr3DataType.int32 => ArrayDataType.i4 diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 6a5c53b98b..7fca9631dc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -164,6 +164,11 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: performRequest } + def postFileWithJsonResponse[T: Reads](file: File): Fox[T] = { + request = request.withBody(file).withMethod("POST") + parseJsonResponse(performRequest) + } + def postFormWithJsonResponse[T: Reads](parameters: Map[String, String]): Fox[T] = { request = request.withBody(parameters).withMethod("POST") parseJsonResponse(performRequest) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala index 6c7ab8de5f..957979af16 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala @@ -18,7 +18,7 @@ import scala.concurrent.ExecutionContext class BinaryDataServiceHolder @Inject()(config: DataStoreConfig, remoteSourceDescriptorService: RemoteSourceDescriptorService, datasetErrorLoggingService: DSDatasetErrorLoggingService, - chunkCacheService: ChunkCacheService, + chunkCacheService: DSChunkCacheService, agglomerateService: AgglomerateService)(implicit ec: ExecutionContext) { val binaryDataService: BinaryDataService = new BinaryDataService( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ChunkCacheService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ChunkCacheService.scala index 195085aa75..0780309dfa 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ChunkCacheService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ChunkCacheService.scala @@ -6,11 +6,13 @@ import com.scalableminds.webknossos.datastore.DataStoreConfig import ucar.ma2.{Array => MultiArray} import jakarta.inject.Inject -class ChunkCacheService @Inject()(config: DataStoreConfig) { +trait ChunkCacheService { + protected val maxSizeBytes: Long + lazy val sharedChunkContentsCache: 
AlfuCache[String, MultiArray] = { // Used by DatasetArray-based datasets. Measure item weight in kilobytes because the weigher can only return int, not long - val maxSizeKiloBytes = Math.floor(config.Datastore.Cache.ImageArrayChunks.maxSizeBytes.toDouble / 1000.0).toInt + val maxSizeKiloBytes = Math.floor(maxSizeBytes.toDouble / 1000.0).toInt def cacheWeight(key: String, arrayBox: Box[MultiArray]): Int = arrayBox match { @@ -22,3 +24,7 @@ class ChunkCacheService @Inject()(config: DataStoreConfig) { AlfuCache(maxSizeKiloBytes, weighFn = Some(cacheWeight)) } } + +class DSChunkCacheService @Inject()(config: DataStoreConfig) extends ChunkCacheService { + protected val maxSizeBytes: Long = config.Datastore.Cache.ImageArrayChunks.maxSizeBytes +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/connectome/ZarrConnectomeFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/connectome/ZarrConnectomeFileService.scala index 48cd0e2ebd..3cc7f5cf24 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/connectome/ZarrConnectomeFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/connectome/ZarrConnectomeFileService.scala @@ -8,7 +8,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.datareaders.DatasetArray import com.scalableminds.webknossos.datastore.datareaders.zarr3.Zarr3Array import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId -import com.scalableminds.webknossos.datastore.services.{ChunkCacheService, VoxelyticsZarrArtifactUtils} +import com.scalableminds.webknossos.datastore.services.{DSChunkCacheService, VoxelyticsZarrArtifactUtils} import com.scalableminds.webknossos.datastore.services.connectome.SynapticPartnerDirection.SynapticPartnerDirection import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import jakarta.inject.Inject @@ -42,7 +42,7 @@ object ConnectomeFileAttributes extends VoxelyticsZarrArtifactUtils with Connect } class ZarrConnectomeFileService @Inject()(remoteSourceDescriptorService: RemoteSourceDescriptorService, - chunkCacheService: ChunkCacheService) + chunkCacheService: DSChunkCacheService) extends FoxImplicits with ConnectomeFileUtils { private lazy val openArraysCache = AlfuCache[(ConnectomeFileKey, String), DatasetArray]() diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/ZarrAgglomerateService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/ZarrAgglomerateService.scala index 75950cb4fd..cf10e5922c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/ZarrAgglomerateService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/ZarrAgglomerateService.scala @@ -13,7 +13,7 @@ import com.scalableminds.webknossos.datastore.datareaders.{DatasetArray, MultiAr import com.scalableminds.webknossos.datastore.geometry.Vec3IntProto import com.scalableminds.webknossos.datastore.helpers.{NativeBucketScanner, NodeDefaults, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, ElementClass} -import com.scalableminds.webknossos.datastore.services.{ChunkCacheService, DataConverter} +import com.scalableminds.webknossos.datastore.services.{DSChunkCacheService, DataConverter} import 
com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, RemoteSourceDescriptorService} import com.typesafe.scalalogging.LazyLogging import ucar.ma2.{Array => MultiArray} @@ -25,7 +25,7 @@ import scala.concurrent.ExecutionContext class ZarrAgglomerateService @Inject()(config: DataStoreConfig, remoteSourceDescriptorService: RemoteSourceDescriptorService, - chunkCacheService: ChunkCacheService) + chunkCacheService: DSChunkCacheService) extends DataConverter with AgglomerateFileUtils with LazyLogging { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/ZarrMeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/ZarrMeshFileService.scala index e69feda8b3..72c5901cd7 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/ZarrMeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/ZarrMeshFileService.scala @@ -10,7 +10,7 @@ import com.scalableminds.webknossos.datastore.datareaders.zarr3.Zarr3Array import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.{ ArrayArtifactHashing, - ChunkCacheService, + DSChunkCacheService, VoxelyticsZarrArtifactUtils } import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService @@ -59,7 +59,7 @@ object MeshFileAttributes extends MeshFileUtils with VoxelyticsZarrArtifactUtils } } -class ZarrMeshFileService @Inject()(chunkCacheService: ChunkCacheService, +class ZarrMeshFileService @Inject()(chunkCacheService: DSChunkCacheService, remoteSourceDescriptorService: RemoteSourceDescriptorService) extends FoxImplicits with MeshFileUtils diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/ZarrSegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/ZarrSegmentIndexFileService.scala index 8852fbc19b..546a3a4278 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/ZarrSegmentIndexFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/ZarrSegmentIndexFileService.scala @@ -10,7 +10,7 @@ import com.scalableminds.webknossos.datastore.datareaders.zarr3.Zarr3Array import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.{ ArrayArtifactHashing, - ChunkCacheService, + DSChunkCacheService, VoxelyticsZarrArtifactUtils } import ucar.ma2.{Array => MultiArray} @@ -50,7 +50,7 @@ object SegmentIndexFileAttributes extends SegmentIndexFileUtils with VoxelyticsZ } class ZarrSegmentIndexFileService @Inject()(remoteSourceDescriptorService: RemoteSourceDescriptorService, - chunkCacheService: ChunkCacheService) + chunkCacheService: DSChunkCacheService) extends FoxImplicits with SegmentIndexFileUtils { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSChunkCacheService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSChunkCacheService.scala new file mode 100644 index 0000000000..a1cc3954b9 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSChunkCacheService.scala @@ -0,0 +1,8 @@ +package com.scalableminds.webknossos.tracingstore + +import com.scalableminds.webknossos.datastore.services.ChunkCacheService +import jakarta.inject.Inject + 
+class TSChunkCacheService @Inject()(config: TracingStoreConfig) extends ChunkCacheService { + protected val maxSizeBytes: Long = config.Tracingstore.Cache.chunkCacheMaxSizeBytes +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreConfig.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreConfig.scala index 1eadc218d9..b026bb2697 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreConfig.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreConfig.scala @@ -25,7 +25,10 @@ class TracingStoreConfig @Inject()(configuration: Configuration) extends ConfigR val address: String = get[String]("tracingstore.redis.address") val port: Int = get[Int]("tracingstore.redis.port") } - val children = List(WebKnossos, Fossildb) + object Cache { + val chunkCacheMaxSizeBytes: Long = get[Long]("tracingstore.cache.chunkCacheMaxSizeBytes") + } + val children = List(WebKnossos, Fossildb, Redis, Cache) } object SlackNotifications { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index 0cc5a2f815..7e397e414e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -25,6 +25,7 @@ class TracingStoreModule extends AbstractModule { bind(classOf[TSSlackNotificationService]).asEagerSingleton() bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() bind(classOf[TSAnnotationService]).asEagerSingleton() + bind(classOf[TSChunkCacheService]).asEagerSingleton() bind(classOf[TemporaryTracingService]).asEagerSingleton() bind(classOf[TSDatasetErrorLoggingService]).asEagerSingleton() } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 286a31966d..013043b3df 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.objectid.ObjectId +import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong @@ -17,6 +18,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ } import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.util.tools.{Box, Empty, Failure, Full} +import com.scalableminds.webknossos.tracingstore.tracings.KeyValueStoreImplicits import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -29,7 +31,8 @@ class EditableMappingController @Inject()( accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, editableMappingIOService: EditableMappingIOService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) - 
extends Controller { + extends Controller + with KeyValueStoreImplicits { def editableMappingInfo(tracingId: String, annotationId: ObjectId, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => @@ -187,4 +190,23 @@ class EditableMappingController @Inject()( } } + def saveFromZip(tracingId: String, + annotationId: ObjectId, + startVersion: Long, + baseMappingName: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + editedEdgesZip <- request.body.asRaw.map(_.asFile).toFox ?~> "zip.file.notFound" + before = Instant.now + numberOfSavedVersions <- editableMappingIOService.initializeFromUploadedZip(tracingId, + annotationId, + startVersion, + baseMappingName, + editedEdgesZip) + _ = Instant.logSince(before, s"Initializing editable mapping $tracingId from zip") + } yield Ok(Json.toJson(numberOfSavedVersions)) + } + } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala index d9da63f02b..37a325aecd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala @@ -30,21 +30,30 @@ trait TempFileService extends LazyLogging { private def ensureParent(): Path = Files.createDirectories(tmpDir) - def create(prefix: String = "tmpFile", lifeTime: FiniteDuration = 2 hours): Path = { + def create(prefix: String = "tmpFile", lifeTime: FiniteDuration = 2 hours, isDirectory: Boolean = false): Path = { ensureParent() val path = tmpDir.resolve(f"$prefix-${Random.alphanumeric.take(15).mkString("")}") - logger.debug(f"Creating temp file at $path") - Files.createFile(path) + logger.debug(f"Creating temp ${if (isDirectory) "dir" else "file"} at $path") + if (isDirectory) + Files.createDirectory(path) + else + Files.createFile(path) activeTempFiles.add((path, Instant.now + lifeTime)) path } + def createDirectory(prefix: String = "tmpDir", lifeTime: FiniteDuration = 2 hours): Path = + create(prefix, lifeTime, isDirectory = true) + private def cleanUpExpiredFiles(): Fox[Unit] = { val now = Instant.now activeTempFiles.foreach { case (path, expiryTime) => if (expiryTime < now) { - tryo(Files.delete(path)) + if (Files.isDirectory(path)) + tryo(FileUtils.deleteDirectory(path.toFile)) + else + tryo(Files.delete(path)) activeTempFiles.remove((path, expiryTime)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingIOService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingIOService.scala index 1b5f3ee007..99e441713f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingIOService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingIOService.scala @@ -1,12 +1,23 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.io.{NamedFunctionStream, ZipIO} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant -import 
com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.datareaders.zarr3._ +import com.scalableminds.webknossos.datastore.datavault.{FileSystemDataVault, VaultPath} +import com.scalableminds.webknossos.datastore.helpers.UPath +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.tracingstore.TSChunkCacheService +import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction import com.scalableminds.webknossos.tracingstore.files.TsTempFileService +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.typesafe.scalalogging.LazyLogging import jakarta.inject.Inject +import play.api.libs.json.Json +import ucar.ma2.{Array => MultiArray} import java.io.{BufferedOutputStream, File, FileOutputStream} import java.nio.ByteBuffer @@ -14,7 +25,13 @@ import java.nio.file.Path import java.util.zip.Deflater import scala.concurrent.ExecutionContext -class EditableMappingIOService @Inject()(tempFileService: TsTempFileService) extends LazyLogging { +class EditableMappingIOService @Inject()(tempFileService: TsTempFileService, + tracingDataStore: TracingDataStore, + chunkCacheService: TSChunkCacheService, + editableMappingService: EditableMappingService) + extends LazyLogging + with FoxImplicits + with KeyValueStoreImplicits { // 10000 edges per chunk (an edge is two Longs in edges and one bool in edgeIsAddition) private val ChunkSize: Int = 10000 @@ -111,4 +128,104 @@ class EditableMappingIOService @Inject()(tempFileService: TsTempFileService) ext private lazy val compressor = BloscCodec.fromConfiguration(BloscCodecConfiguration.defaultForWKZarrOutput).compressor + def initializeFromUploadedZip(tracingId: String, + annotationId: ObjectId, + startVersion: Long, + baseMappingName: String, + editedEdgesZip: File)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = + for { + _ <- tracingDataStore.editableMappingsInfo.put( + tracingId, + 0L, // Note: updates start at startVersion, but info is V0 for consistency with the annotationProto v0 + toProtoBytes(editableMappingService.create(baseMappingName)) + ) + // The zarr file contains two arrays: edges encodes the source and destination segment ids, + // and edgeIsAddition contains a bool for each edge on whether it was + // added (edgeIsAddition==true; corresponds to a merge agglomerate action) + // or removed (edgeIsAddition==false; corresponds to a split agglomerate action). + (editedEdgesArray, edgeIsAdditionArray) <- unzipAndReadZarr(editedEdgesZip) + timestamp = Instant.now.epochMillis + updateActions: Seq[UpdateAction] = (0 until edgeIsAdditionArray.getSize.toInt).map { edgeIndex => + val edgeSrc = editedEdgesArray.getLong(editedEdgesArray.getIndex.set(Array(edgeIndex, 0))) + val edgeDst = editedEdgesArray.getLong(editedEdgesArray.getIndex.set(Array(edgeIndex, 1))) + val edgeIsAddition = edgeIsAdditionArray.getBoolean(edgeIndex) + buildUpdateActionFromEdge(edgeSrc, edgeDst, edgeIsAddition, tracingId, timestamp) + } + updatesGrouped = updateActions.grouped(100).toSeq + _ <- Fox.serialCombined(updatesGrouped.zipWithIndex) { + case (updateGroup: Seq[UpdateAction], updateGroupIndex) => + tracingDataStore.annotationUpdates.put(annotationId.toString, + startVersion + updateGroupIndex, + Json.toJson(updateGroup)) + } + numberOfSavedVersions = updatesGrouped.length + } yield numberOfSavedVersions + + private def unzipAndReadZarr(editedEdgesZip: File)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[(MultiArray, MultiArray)] = { + val unzippedDir = tempFileService.createDirectory() + for { + _ <- ZipIO + .unzipToDirectory(editedEdgesZip, + unzippedDir, + includeHiddenFiles = true, + List.empty, + truncateCommonPrefix = false, + excludeFromPrefix = None) + .toFox + unzippedVaultPath = new VaultPath(UPath.fromLocalPath(unzippedDir), FileSystemDataVault.create) + editedEdgesZarrArray <- Zarr3Array.open(unzippedVaultPath / "edges/", + DataSourceId("dummy", "unused"), + "layer", + None, + None, + None, + chunkCacheService.sharedChunkContentsCache) + edgeIsAdditionZarrArray <- Zarr3Array.open(unzippedVaultPath / "edgeIsAddition/", + DataSourceId("dummy", "unused"), + "layer", + None, + None, + None, + chunkCacheService.sharedChunkContentsCache) + numEdges <- editedEdgesZarrArray.datasetShape.flatMap(_.headOption).toFox + _ <- Fox.fromBool(numEdges.toInt.toLong == numEdges) ?~> "editableMappingFromZip.numEdges.exceedsInt" + editedEdges <- editedEdgesZarrArray.readAsMultiArray(offset = Array(0L, 0L), shape = Array(numEdges.toInt, 2)) + edgeIsAddition <- edgeIsAdditionZarrArray.readAsMultiArray(offset = 0L, shape = numEdges.toInt) + } yield (editedEdges, edgeIsAddition) + } + + private def buildUpdateActionFromEdge(edgeSrc: Long, + edgeDst: Long, + edgeIsAddition: Boolean, + tracingId: String, + timestamp: Long): EditableMappingUpdateAction = + if (edgeIsAddition) { + MergeAgglomerateUpdateAction( + agglomerateId1 = 0, + agglomerateId2 = 0, + segmentPosition1 = None, + segmentPosition2 = None, + segmentId1 = Some(edgeSrc), + segmentId2 = Some(edgeDst), + mag = Vec3Int.ones, // unused, as we do not look up segment ids by positions + actionTracingId = tracingId, + actionTimestamp = Some(timestamp), + actionAuthorId = None, + info = None + ) + } else { + SplitAgglomerateUpdateAction( + agglomerateId = 0, + segmentPosition1 = None, + segmentPosition2 = None, + segmentId1 = Some(edgeSrc), + segmentId2 = Some(edgeDst), + mag = Vec3Int.ones, // unused, as we do not look up segment ids by positions + actionTracingId = tracingId, + actionTimestamp = Some(timestamp), + actionAuthorId = None, + info = None + ) + } } diff --git a/webknossos-tracingstore/conf/standalone-tracingstore.conf b/webknossos-tracingstore/conf/standalone-tracingstore.conf index 75fc093b4e..002e6025ab 100644 --- a/webknossos-tracingstore/conf/standalone-tracingstore.conf +++
b/webknossos-tracingstore/conf/standalone-tracingstore.conf @@ -61,6 +61,7 @@ tracingstore { address = "localhost" port = 6379 } + cache.chunkCacheMaxSizeBytes = 2000000000 # 2 GB } slackNotifications { diff --git a/webknossos-tracingstore/conf/tracingstore.latest.routes b/webknossos-tracingstore/conf/tracingstore.latest.routes index 4ed67f7558..adb4a34547 100644 --- a/webknossos-tracingstore/conf/tracingstore.latest.routes +++ b/webknossos-tracingstore/conf/tracingstore.latest.routes @@ -40,6 +40,7 @@ POST /mapping/:tracingId/agglomerateGraphNeighbors GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) GET /mapping/:tracingId/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraph(tracingId: String, agglomerateId: Long, version: Option[Long]) GET /mapping/:tracingId/editedEdgesZip @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editedEdgesZip(tracingId: String, version: Option[Long]) +POST /mapping/:tracingId/save @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.saveFromZip(tracingId: String, annotationId: ObjectId, startVersion: Long, baseMappingName: String) # Zarr endpoints for volume annotations # Zarr version 2
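
Note on the edited-edges interchange format (a minimal sketch, not part of the patch; the array names and shapes follow unzipAndReadZarr, the segment ids are made up): the uploaded zip holds two Zarr3 arrays, edges/ with shape [numEdges, 2] containing int64 source/target segment id pairs, and edgeIsAddition/ with shape [numEdges] containing one bool per edge. The ucar.ma2 index arithmetic below mirrors the loop in initializeFromUploadedZip.

object EditedEdgesZipLayoutSketch {
  import ucar.ma2.{Array => MultiArray, DataType => MADataType}

  def main(args: Array[String]): Unit = {
    val numEdges = 2
    // edges/ holds int64 (source, target) pairs; edgeIsAddition/ holds one bool per edge.
    val edges = MultiArray.factory(MADataType.LONG, Array(numEdges, 2), Array(10L, 11L, 20L, 21L))
    val edgeIsAddition = MultiArray.factory(MADataType.BOOLEAN, Array(numEdges), Array(true, false))
    (0 until numEdges).foreach { edgeIndex =>
      val edgeSrc = edges.getLong(edges.getIndex.set(Array(edgeIndex, 0)))
      val edgeDst = edges.getLong(edges.getIndex.set(Array(edgeIndex, 1)))
      val kind = if (edgeIsAddition.getBoolean(edgeIndex)) "merge" else "split"
      println(s"edge $edgeIndex: $edgeSrc -> $edgeDst ($kind)")
    }
  }
}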
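Note on version bookkeeping across uploaded layers (a sketch under assumptions taken from the code above: one update action per edited edge, grouped 100 per version, and version 0 reserved for the initial annotationProto / editableMappingsInfo): saveEditableMappingIfPresent returns the number of versions it wrote, and mergeAndSaveVolumeLayers advances layerUpdatesStartVersionMutable by that amount so the next layer's update actions land after the previous layer's. The layer sizes here are hypothetical.

object UpdateVersionBookkeepingSketch {
  // Mirrors updateActions.grouped(100).toSeq.length in initializeFromUploadedZip.
  private def numberOfSavedVersions(numUpdateActions: Int, groupSize: Int = 100): Long =
    ((numUpdateActions + groupSize - 1) / groupSize).toLong

  def main(args: Array[String]): Unit = {
    var nextStartVersion = 1L // version 0 holds the initial annotationProto
    val editedEdgeCountsPerLayer = Seq(250, 100, 7) // hypothetical uploaded layers
    editedEdgeCountsPerLayer.foreach { numEdges =>
      val saved = numberOfSavedVersions(numEdges)
      println(s"layer with $numEdges edited edges: update versions $nextStartVersion to ${nextStartVersion + saved - 1}")
      nextStartVersion += saved // the next layer's update actions start after this one
    }
  }
}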
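Note on the widened fill_value parsing in Zarr3ArrayHeader (a self-contained sketch that mirrors the fallback chain above; it substitutes BigDecimal for java.lang.Number so it compiles without the project's JsonImplicits): booleans are folded into the string side of the Either, which is exactly what DatasetHeader.fillValueBoolean later matches on (Left("true") yields true).

import play.api.libs.json._

object FillValueParsingSketch {
  // Try string, then number, then boolean; fold booleans into Left so the
  // existing Either[String, _] plumbing stays unchanged.
  def parseFillValue(json: JsValue): JsResult[Either[String, BigDecimal]] =
    (json.validate[String], json.validate[BigDecimal], json.validate[Boolean]) match {
      case (JsSuccess(s, _), _, _) => JsSuccess(Left(s))
      case (_, JsSuccess(n, _), _) => JsSuccess(Right(n))
      case (_, _, JsSuccess(b, _)) => JsSuccess(Left(b.toString)) // true becomes Left("true")
      case _                       => JsError("Could not parse fill_value as string, number or boolean value.")
    }

  def main(args: Array[String]): Unit = {
    println(parseFillValue(JsBoolean(true)))  // Left("true") -> fillValueBoolean = true
    println(parseFillValue(JsNumber(0)))      // Right(0)
    println(parseFillValue(JsString("NaN")))  // Left("NaN")
  }
}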
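Note on the shared chunk cache weigher (a sketch; the kilobyte unit is taken from the comment in ChunkCacheService above, which states the weigher can only return Int, not Long): expressing capacity and per-entry weight in kilobytes keeps byte budgets such as the 2 GB standalone tracingstore default representable as Int.

object ChunkCacheWeightSketch {
  def main(args: Array[String]): Unit = {
    val maxSizeBytes = 2000000000L // tracingstore.cache.chunkCacheMaxSizeBytes (standalone default, 2 GB)
    val maxSizeKiloBytes = Math.floor(maxSizeBytes.toDouble / 1000.0).toInt
    println(maxSizeKiloBytes) // 2000000, safely below Int.MaxValue
  }
}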