Changes from all commits (28 commits):
- c6adc16: WIP Reupload exported editable mapping annotation zip (fm3, Sep 30, 2025)
- b1091e6: access edges location (fm3, Sep 30, 2025)
- c874e70: call rpc to tracingstore (fm3, Sep 30, 2025)
- 83509f2: WIP implement saveFroMZip rpc (fm3, Sep 30, 2025)
- c6a85da: Merge branch 'master' into upload-editable-mapping (fm3, Oct 1, 2025)
- 034c28a: open uploaded edge zarr arrays (fm3, Oct 1, 2025)
- a7ff6ad: Fix zarr3 header json reads (fm3, Oct 1, 2025)
- d077ef7: error case (fm3, Oct 1, 2025)
- b69baaa: Merge branch 'master' into upload-editable-mapping (fm3, Oct 2, 2025)
- 95d04e8: allow reading boolean zarr arrays (fm3, Oct 2, 2025)
- 758a5d7: store update actions, fix annotation id, tracingid (fm3, Oct 2, 2025)
- e944f1a: respect bytes codec endianness when reading zarr3 arrays (fm3, Oct 2, 2025)
- 1ec08e8: Merge branch 'master' into upload-editable-mapping (fm3, Oct 6, 2025)
- 8c54c06: fix segment id; mappingName (fm3, Oct 6, 2025)
- 1641619: set earliestAccessibleVersion (fm3, Oct 6, 2025)
- 79221c4: chunk contents cache also for tracingstore (fm3, Oct 6, 2025)
- a639433: group updates when saving (fm3, Oct 6, 2025)
- 055078a: First update needs to be at v1, not v0 (fm3, Oct 6, 2025)
- 27e8cde: propagate boolean fill value to empty chunks (fm3, Oct 6, 2025)
- 2d7f68a: format (fm3, Oct 6, 2025)
- 5874ce5: move to service, split function (fm3, Oct 6, 2025)
- 3d0288e: add assertion for duplicate fallback layers. densify update versions … (fm3, Oct 6, 2025)
- e07251f: changelog (fm3, Oct 6, 2025)
- b42cf2b: cleanup (fm3, Oct 6, 2025)
- 8fcf0bb: log duration (fm3, Oct 6, 2025)
- 80e8a1d: Merge branch 'master' into upload-editable-mapping (fm3, Oct 7, 2025)
- 1889fc4: clean multi-layer versioning (fm3, Oct 7, 2025)
- feb8eba: convert updatesGrouped toSeq, to avoid iterator statefulness (fm3, Oct 7, 2025)
4 changes: 2 additions & 2 deletions app/Startup.scala
@@ -98,7 +98,7 @@ class Startup @Inject()(actorSystem: ActorSystem,
}

private def ensurePostgresSchema(): Unit = {
logger.info("Checking database schema")
logger.info("Checking database schema...")

val errorMessageBuilder = mutable.ListBuffer[String]()
val capturingProcessLogger =
@@ -115,7 +115,7 @@ class Startup @Inject()(actorSystem: ActorSystem,
}

private def ensurePostgresDatabase(): Unit = {
logger.info(s"Ensuring Postgres database")
logger.info(s"Ensuring Postgres database...")
val processLogger =
ProcessLogger((o: String) => logger.info(s"dbtool: $o"), (e: String) => logger.error(s"dbtool: $e"))

100 changes: 62 additions & 38 deletions app/controllers/AnnotationIOController.scala
@@ -132,25 +132,25 @@ class AnnotationIOController @Inject()(
volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource)
tracingStoreClient <- tracingStoreService.clientFor(dataset)
newAnnotationId = ObjectId.generate
- mergedVolumeLayers <- mergeAndSaveVolumeLayers(newAnnotationId,
- volumeLayersGrouped,
- tracingStoreClient,
- parsedFiles.otherFiles,
- usableDataSource,
- dataset._id)
+ (mergedVolumeLayers, earliestAccessibleVersion) <- mergeAndSaveVolumeLayers(newAnnotationId,
+ volumeLayersGrouped,
+ tracingStoreClient,
+ parsedFiles.otherFiles,
+ usableDataSource,
+ dataset._id)
mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient)
annotation <- annotationService.createFrom(request.identity,
dataset,
mergedSkeletonLayers ::: mergedVolumeLayers,
AnnotationType.Explorational,
name,
description,
- ObjectId.generate)
+ newAnnotationId)
annotationProto = AnnotationProto(
description = annotation.description,
version = 0L,
annotationLayers = annotation.annotationLayers.map(_.toProto),
- earliestAccessibleVersion = 0L
+ earliestAccessibleVersion = earliestAccessibleVersion
)
_ <- tracingStoreClient.saveAnnotationProto(annotation._id, annotationProto)
_ <- annotationDAO.insertOne(annotation)
@@ -168,32 +168,55 @@ class AnnotationIOController @Inject()(
client: WKRemoteTracingStoreClient,
otherFiles: Map[String, File],
dataSource: UsableDataSource,
- datasetId: ObjectId): Fox[List[AnnotationLayer]] =
+ datasetId: ObjectId): Fox[(List[AnnotationLayer], Long)] =
if (volumeLayersGrouped.isEmpty)
- Fox.successful(List())
+ Fox.successful(List(), 0L)
+ else if (volumeLayersGrouped.exists(layersOfAnnotation =>
+ layersOfAnnotation.length != layersOfAnnotation.distinctBy(_.tracing.fallbackLayer).length))
Review comment (Contributor):
I think this assertion here is not correct. What if I upload an annotation with 2 "custom" volume layers where each has no fallback layer? In that case this assertion would fail but the annotation should be correct 🤔
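One way to narrow the check in line with this comment (a sketch with simplified, hypothetical types; it assumes `fallbackLayer` is an `Option[String]` on the tracing, which the condition above suggests, and is not the PR's actual code):

```scala
// Hypothetical stand-ins for the real VolumeTracing / layer types.
final case class Tracing(fallbackLayer: Option[String])
final case class VolumeLayer(tracing: Tracing)

// Only layers that actually define a fallback layer can collide; any number
// of "custom" layers with fallbackLayer == None would pass this check.
def hasDuplicateFallbackLayer(layersOfAnnotation: Seq[VolumeLayer]): Boolean = {
  val definedFallbacks = layersOfAnnotation.flatMap(_.tracing.fallbackLayer)
  definedFallbacks.length != definedFallbacks.distinct.length
}
```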

Fox.failure("Cannot save annotation with multiple volume layers that have the same fallback segmentation layer.")
else if (volumeLayersGrouped.length > 1 && volumeLayersGrouped.exists(_.length > 1))
Fox.failure("Cannot merge multiple annotations that each have multiple volume layers.")
- else if (volumeLayersGrouped.length == 1) { // Just one annotation was uploaded, keep its layers separate
- Fox.serialCombined(volumeLayersGrouped.toList.flatten.zipWithIndex) { volumeLayerWithIndex =>
- val uploadedVolumeLayer = volumeLayerWithIndex._1
- val idx = volumeLayerWithIndex._2
- val newTracingId = TracingId.generate
- for {
- _ <- client.saveVolumeTracing(newAnnotationId,
- newTracingId,
- uploadedVolumeLayer.tracing,
- uploadedVolumeLayer.getDataZipFrom(otherFiles),
- dataSource = dataSource,
- datasetId = datasetId)
- } yield
- AnnotationLayer(
- newTracingId,
- AnnotationLayerType.Volume,
- uploadedVolumeLayer.name.getOrElse(AnnotationLayer.defaultVolumeLayerName + idx.toString),
- AnnotationLayerStatistics.unknown
- )
- }
- } else { // Multiple annotations with volume layers (but at most one each) was uploaded merge those volume layers into one
+ else if (volumeLayersGrouped.length > 1 && volumeLayersGrouped.exists(
+ _.exists(_.editedMappingEdgesLocation.isDefined))) {
+ Fox.failure("Cannot merge multiple annotations with editable mapping (proofreading) edges.")
+ } else if (volumeLayersGrouped.length == 1) { // Just one annotation was uploaded, keep its layers separate
+ var layerUpdatesStartVersionMutable = 1L
+ for {
+ annotationLayers <- Fox.serialCombined(volumeLayersGrouped.toList.flatten.zipWithIndex) {
+ volumeLayerWithIndex =>
+ val uploadedVolumeLayer = volumeLayerWithIndex._1
+ val idx = volumeLayerWithIndex._2
+ val newTracingId = TracingId.generate
+ for {
+ numberOfSavedVersions <- client.saveEditableMappingIfPresent(
+ newAnnotationId,
+ newTracingId,
+ uploadedVolumeLayer.getEditableMappingEdgesZipFrom(otherFiles),
+ uploadedVolumeLayer.editedMappingBaseMappingName,
+ startVersion = layerUpdatesStartVersionMutable
+ )
+ // The next layer’s update actions then need to start after this one
+ _ = layerUpdatesStartVersionMutable = layerUpdatesStartVersionMutable + numberOfSavedVersions
Review comment (Contributor):
Why is there an assignment to _? I think this is unnecessary

Suggested change:
- _ = layerUpdatesStartVersionMutable = layerUpdatesStartVersionMutable + numberOfSavedVersions
+ layerUpdatesStartVersionMutable = layerUpdatesStartVersionMutable + numberOfSavedVersions
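For context on why the `_ =` form appears here at all: in a Scala for-comprehension, a bare `name = expr` step is a value definition that binds a new `name`; it does not assign to an outer `var`. If that reading is right, the suggested change would shadow `layerUpdatesStartVersionMutable` instead of updating it across layers. A minimal, self-contained sketch (not the PR code):

```scala
object ForComprehensionAssignmentDemo extends App {
  var startVersion = 1L // outer mutable state, as in the controller

  def save(): Option[Long] = Some(3L) // stand-in: a save that reports 3 new versions

  for {
    numberOfSavedVersions <- save()
    // Assignment *expression* (result Unit) bound to _: mutates the outer var.
    _ = startVersion = startVersion + numberOfSavedVersions
    // A bare `startVersion = startVersion + numberOfSavedVersions` here would
    // instead *define* a new local startVersion, leaving the outer var at 1L.
  } yield ()

  println(startVersion) // 4: the next layer's updates would start after this one
}
```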

+ mappingName = if (uploadedVolumeLayer.editedMappingEdgesLocation.isDefined) Some(newTracingId)
+ else uploadedVolumeLayer.tracing.mappingName
+ _ <- client.saveVolumeTracing(
+ newAnnotationId,
+ newTracingId,
+ uploadedVolumeLayer.tracing.copy(mappingName = mappingName),
+ uploadedVolumeLayer.getDataZipFrom(otherFiles),
+ dataSource = dataSource,
+ datasetId = datasetId
+ )
+ } yield
+ AnnotationLayer(
+ newTracingId,
+ AnnotationLayerType.Volume,
+ uploadedVolumeLayer.name.getOrElse(AnnotationLayer.defaultVolumeLayerName + idx.toString),
+ AnnotationLayerStatistics.unknown
+ )
+ }
+ } yield (annotationLayers, layerUpdatesStartVersionMutable)
+ } else { // Multiple annotations with volume layers (but at most one each) was uploaded, they have no editable mappings. merge those volume layers into one
Review comment (Contributor):
Grammar / punctuation is a little off in the comment. Maybe something like
// Multiple annotations with volume layers (but at most one each) were uploaded. Merge those volume layers into one. None has editable mappings.

Review comment (Contributor):
Why can't there be more than one volume layer per annotation? Is such an upload not supported in general? In that case, my comment above regarding the assertion is wrong.

val uploadedVolumeLayersFlat = volumeLayersGrouped.toList.flatten
val newTracingId = TracingId.generate
for {
@@ -206,13 +229,14 @@ class AnnotationIOController @Inject()(
uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles))
)
} yield
- List(
- AnnotationLayer(
- newTracingId,
- AnnotationLayerType.Volume,
- AnnotationLayer.defaultVolumeLayerName,
- AnnotationLayerStatistics.unknown
- ))
+ (List(
+ AnnotationLayer(
+ newTracingId,
+ AnnotationLayerType.Volume,
+ AnnotationLayer.defaultVolumeLayerName,
+ AnnotationLayerStatistics.unknown
+ )),
+ 0L)
}

private def mergeAndSaveSkeletonLayers(skeletonTracings: List[SkeletonTracing],
10 changes: 9 additions & 1 deletion app/models/annotation/AnnotationUploadService.scala
@@ -21,9 +21,17 @@ import play.api.i18n.MessagesProvider

import scala.concurrent.{ExecutionContext, Future}

- case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, name: Option[String]) {
+ case class UploadedVolumeLayer(tracing: VolumeTracing,
+ dataZipLocation: String,
+ name: Option[String],
+ editedMappingEdgesLocation: Option[String],
+ editedMappingBaseMappingName: Option[String]) {
def getDataZipFrom(otherFiles: Map[String, File]): Option[File] =
otherFiles.get(dataZipLocation)

def getEditableMappingEdgesZipFrom(otherFiles: Map[String, File]): Option[File] =
editedMappingEdgesLocation.flatMap(otherFiles.get)

}

case class SharedParsingParameters(useZipName: Boolean,
17 changes: 17 additions & 0 deletions app/models/annotation/WKRemoteTracingStoreClient.scala
@@ -249,6 +249,23 @@ class WKRemoteTracingStoreClient(
} yield ()
}

def saveEditableMappingIfPresent(annotationId: ObjectId,
newTracingId: String,
editedMappingEdgesZip: Option[File],
editedMappingBaseMappingName: Option[String],
startVersion: Long): Fox[Long] =
(editedMappingEdgesZip, editedMappingBaseMappingName) match {
case (Some(zipfile), Some(baseMappingName)) =>
rpc(s"${tracingStore.url}/tracings/mapping/$newTracingId/save")
.addQueryString("token" -> RpcTokenHolder.webknossosToken)
.addQueryString("annotationId" -> annotationId.toString)
.addQueryString("baseMappingName" -> baseMappingName)
.addQueryString("startVersion" -> startVersion.toString)
.postFileWithJsonResponse[Long](zipfile)
case (None, None) => Fox.successful(0L)
case _ => Fox.failure("annotation.upload.editableMappingIncompleteInformation")
}

def getVolumeTracing(annotationId: ObjectId,
annotationLayer: AnnotationLayer,
version: Option[Long],
9 changes: 7 additions & 2 deletions app/models/annotation/nml/NmlParser.scala
@@ -91,10 +91,13 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO)
segmentGroups = v.segmentGroups,
hasSegmentIndex = None, // Note: this property may be adapted later in adaptPropertiesToFallbackLayer
editPositionAdditionalCoordinates = nmlParams.editPositionAdditionalCoordinates,
- additionalAxes = nmlParams.additionalAxisProtos
+ additionalAxes = nmlParams.additionalAxisProtos,
+ hasEditableMapping = if (v.editedMappingEdgesLocation.isDefined) Some(true) else None
),
basePath.getOrElse("") + v.dataZipPath,
v.name,
+ v.editedMappingEdgesLocation.map(location => basePath.getOrElse("") + location),
Review comment (Contributor):
Shouldn't this preferably use UPath or so to use a / operator instead of concatenating strings? Overall the result would very likely be a simple string concatenation but it might feel safer? 🤔
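A sketch of that idea with standard-library types (the `UPath` API mentioned above is not shown in this diff, so `java.nio.file.Paths` stands in; the example values are hypothetical):

```scala
import java.nio.file.Paths

val basePath: Option[String] = Some("annotation1") // hypothetical
val edgesLocation = "mappings/edges.zip"           // hypothetical

// resolve() joins path segments with an explicit separator, avoiding an
// accidental "foo" + "bar" == "foobar" when basePath lacks a trailing slash.
val joined: String =
  basePath.map(base => Paths.get(base).resolve(edgesLocation).toString).getOrElse(edgesLocation)
// joined == "annotation1/mappings/edges.zip" on POSIX systems
```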

+ v.editedMappingBaseMappingName
)
}
skeletonTracing: SkeletonTracing = SkeletonTracing(
@@ -220,7 +223,9 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO)
getSingleAttributeOpt(node, "name"),
parseVolumeSegmentMetadata(node \ "segments" \ "segment"),
getSingleAttributeOpt(node, "largestSegmentId").flatMap(_.toLongOpt),
- extractSegmentGroups(node \ "groups").getOrElse(List())
+ extractSegmentGroups(node \ "groups").getOrElse(List()),
+ getSingleAttributeOpt(node, "editedMappingEdgesLocation"),
+ getSingleAttributeOpt(node, "editedMappingBaseMappingName")
)
}
)
4 changes: 3 additions & 1 deletion app/models/annotation/nml/NmlVolumeTag.scala
@@ -9,4 +9,6 @@ case class NmlVolumeTag(dataZipPath: String,
name: Option[String],
segments: Seq[Segment],
largestSegmentId: Option[Long],
- segmentGroups: Seq[SegmentGroup]) {}
+ segmentGroups: Seq[SegmentGroup],
+ editedMappingEdgesLocation: Option[String],
+ editedMappingBaseMappingName: Option[String])
1 change: 1 addition & 0 deletions conf/application.conf
@@ -200,6 +200,7 @@ tracingstore {
address = "localhost"
port = 6379
}
cache.chunkCacheMaxSizeBytes = 2000000000 # 2 GB
}

# Serve image data. Only active if the corresponding play module is enabled
1 change: 1 addition & 0 deletions conf/messages
@@ -267,6 +267,7 @@ annotation.idForTracing.failed=Could not find the annotation id for this tracing
annotation.editableMapping.getAgglomerateGraph.failed=Could not look up an agglomerate graph for requested agglomerate.
annotation.editableMapping.getAgglomerateIdsForSegments.failed=Could not look up agglomerate ids for requested segments.
annotation.duplicate.failed=Failed to duplicate annotation
annotation.upload.editableMappingIncompleteInformation=Could not store editable mapping, either file or baseMappingName is missing.

mesh.file.listChunks.failed=Failed to load chunk list for segment {0} from mesh file “{1}”
mesh.file.loadChunk.failed=Failed to load mesh chunk for segment
4 changes: 2 additions & 2 deletions frontend/javascripts/viewer/view/version_entry.tsx
@@ -167,7 +167,7 @@ const descriptionFns: Record<
? `at position ${action.value.segmentPosition1}`
: (action.value.segmentId1 ?? "unknown");
const segment2Description =
- action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown";
+ action.value.segmentPosition2 ?? action.value.segmentId2 ?? "unknown";
Review comment (Contributor):
uff xD, thanks for fixing this

const description = `Split agglomerate ${action.value.agglomerateId} by separating the segments ${segment1Description} and ${segment2Description}.`;
return {
description,
@@ -180,7 +180,7 @@ const descriptionFns: Record<
? `at position ${action.value.segmentPosition1}`
: (action.value.segmentId1 ?? "unknown");
const segment2Description =
- action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown";
+ action.value.segmentPosition2 ?? action.value.segmentId2 ?? "unknown";
const description = `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments ${segment1Description} and ${segment2Description}.`;
return {
description,
2 changes: 2 additions & 0 deletions unreleased_changes/8969.md
@@ -0,0 +1,2 @@
### Added
- Editable mapping (aka proofreading) annotations can now be downloaded as a zip file and re-uploaded.
@@ -61,7 +61,7 @@ class DataStoreModule extends AbstractModule {
bind(classOf[Hdf5ConnectomeFileService]).asEagerSingleton()
bind(classOf[NeuroglancerPrecomputedMeshFileService]).asEagerSingleton()
bind(classOf[RemoteSourceDescriptorService]).asEagerSingleton()
- bind(classOf[ChunkCacheService]).asEagerSingleton()
+ bind(classOf[DSChunkCacheService]).asEagerSingleton()
bind(classOf[DatasetCache]).asEagerSingleton()
}
}
@@ -4,62 +4,66 @@ import com.scalableminds.util.enumeration.ExtendedEnumeration

object ArrayDataType extends ExtendedEnumeration {
type ArrayDataType = Value
- val f8, f4, i8, u8, i4, u4, i2, u2, i1, u1 = Value
+ val f8, f4, i8, u8, i4, u4, i2, u2, i1, u1, bool = Value

def bytesPerElement(dataType: ArrayDataType): Int =
dataType match {
- case ArrayDataType.f8 => 8
- case ArrayDataType.f4 => 4
- case ArrayDataType.i8 => 8
- case ArrayDataType.u8 => 8
- case ArrayDataType.i4 => 4
- case ArrayDataType.u4 => 4
- case ArrayDataType.i2 => 2
- case ArrayDataType.u2 => 2
- case ArrayDataType.i1 => 1
- case ArrayDataType.u1 => 1
+ case ArrayDataType.f8 => 8
+ case ArrayDataType.f4 => 4
+ case ArrayDataType.i8 => 8
+ case ArrayDataType.u8 => 8
+ case ArrayDataType.i4 => 4
+ case ArrayDataType.u4 => 4
+ case ArrayDataType.i2 => 2
+ case ArrayDataType.u2 => 2
+ case ArrayDataType.i1 => 1
+ case ArrayDataType.u1 => 1
+ case ArrayDataType.bool => 1
}

def maxValue(dataType: ArrayDataType): Number =
dataType match {
- case ArrayDataType.f8 => Double.MaxValue
- case ArrayDataType.f4 => Float.MaxValue
- case ArrayDataType.i8 => Long.MaxValue
- case ArrayDataType.u8 => Long.MaxValue // Max value for primitive datatypes
- case ArrayDataType.i4 => Int.MaxValue
- case ArrayDataType.u4 => Math.pow(2, 4 * 8).toLong - 1
- case ArrayDataType.i2 => Char.MaxValue
- case ArrayDataType.u2 => Math.pow(2, 2 * 8).toLong - 1
- case ArrayDataType.i1 => Byte.MaxValue
- case ArrayDataType.u1 => Math.pow(2, 1 * 8).toLong - 1
+ case ArrayDataType.f8 => Double.MaxValue
+ case ArrayDataType.f4 => Float.MaxValue
+ case ArrayDataType.i8 => Long.MaxValue
+ case ArrayDataType.u8 => Long.MaxValue // Max value for primitive datatypes
+ case ArrayDataType.i4 => Int.MaxValue
+ case ArrayDataType.u4 => Math.pow(2, 4 * 8).toLong - 1
+ case ArrayDataType.i2 => Char.MaxValue
+ case ArrayDataType.u2 => Math.pow(2, 2 * 8).toLong - 1
+ case ArrayDataType.i1 => Byte.MaxValue
+ case ArrayDataType.u1 => Math.pow(2, 1 * 8).toLong - 1
+ case ArrayDataType.bool => 1
}

def minValue(dataType: ArrayDataType): Number =
dataType match {
- case ArrayDataType.f8 => Double.MinValue
- case ArrayDataType.f4 => Float.MinValue
- case ArrayDataType.i8 => Long.MinValue
- case ArrayDataType.u8 => 0
- case ArrayDataType.i4 => Int.MinValue
- case ArrayDataType.u4 => 0
- case ArrayDataType.i2 => Char.MinValue
- case ArrayDataType.u2 => 0
- case ArrayDataType.i1 => Byte.MinValue
- case ArrayDataType.u1 => 0
+ case ArrayDataType.f8 => Double.MinValue
+ case ArrayDataType.f4 => Float.MinValue
+ case ArrayDataType.i8 => Long.MinValue
+ case ArrayDataType.u8 => 0
+ case ArrayDataType.i4 => Int.MinValue
+ case ArrayDataType.u4 => 0
+ case ArrayDataType.i2 => Char.MinValue
+ case ArrayDataType.u2 => 0
+ case ArrayDataType.i1 => Byte.MinValue
+ case ArrayDataType.u1 => 0
+ case ArrayDataType.bool => 0
}

def toWKWId(dataType: ArrayDataType): Int =
dataType match {
- case ArrayDataType.u1 => 1
- case ArrayDataType.u2 => 2
- case ArrayDataType.u4 => 3
- case ArrayDataType.u8 => 4
- case ArrayDataType.f4 => 5
- case ArrayDataType.f8 => 6
- case ArrayDataType.i1 => 7
- case ArrayDataType.i2 => 8
- case ArrayDataType.i4 => 9
- case ArrayDataType.i8 => 10
+ case ArrayDataType.u1 => 1
+ case ArrayDataType.u2 => 2
+ case ArrayDataType.u4 => 3
+ case ArrayDataType.u8 => 4
+ case ArrayDataType.f4 => 5
+ case ArrayDataType.f8 => 6
+ case ArrayDataType.i1 => 7
+ case ArrayDataType.i2 => 8
+ case ArrayDataType.i4 => 9
+ case ArrayDataType.i8 => 10
+ case ArrayDataType.bool => ???
}

def fromWKWTypeId(wkwVoxelTypeId: Int): ArrayDataType.Value =