diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 806ceeba709..02a80e0f0ae 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -451,4 +451,12 @@ class AnnotationController @Inject()( } } + def releaseMutex(id: ObjectId): Action[AnyContent] = sil.SecuredAction.async { implicit request => + logTime(slackNotificationService.noticeSlowRequest, durationThreshold = 1 second) { + for { + _ <- annotationMutexService.release(id, request.identity._id) ?~> "annotation.mutex.failed" + } yield Ok + } + } + } diff --git a/app/models/annotation/AnnotationMutexService.scala b/app/models/annotation/AnnotationMutexService.scala index 98e4dd1beef..543fda0db66 100644 --- a/app/models/annotation/AnnotationMutexService.scala +++ b/app/models/annotation/AnnotationMutexService.scala @@ -68,6 +68,17 @@ class AnnotationMutexService @Inject()(val lifecycle: ApplicationLifecycle, _ <- annotationMutexDAO.upsertOne(mutex.copy(expiry = Instant.in(defaultExpiryTime))) } yield MutexResult(canEdit = true, None) + def release(annotationId: ObjectId, userId: ObjectId): Fox[Unit] = + for { + mutex <- annotationMutexDAO.findOne(annotationId).shiftBox + _ <- mutex match { + case Full(mutex) if mutex.userId == userId => + annotationMutexDAO.deleteOne(annotationId).map(_ => ()) + case _ => + Fox.successful(()) + } + } yield () + def publicWrites(mutexResult: MutexResult): Fox[JsObject] = for { userOpt <- Fox.runOptional(mutexResult.blockedByUser)(user => userDAO.findOne(user)(GlobalAccessContext)) @@ -114,4 +125,6 @@ class AnnotationMutexDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionC def deleteExpired(): Fox[Int] = run(q"DELETE FROM webknossos.annotation_mutexes WHERE expiry < NOW()".asUpdate) + def deleteOne(annotationId: ObjectId): Fox[Int] = + run(q"DELETE FROM webknossos.annotation_mutexes WHERE _annotation = $annotationId".asUpdate) } diff --git a/conf/webknossos.latest.routes 
b/conf/webknossos.latest.routes index b095dfe6775..7e57987a38d 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -155,6 +155,7 @@ DELETE /annotations/:id POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: ObjectId, mergedTyp: String, mergedId: ObjectId) GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: ObjectId, version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: ObjectId) +DELETE /annotations/:id/mutex controllers.AnnotationController.releaseMutex(id: ObjectId) GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: ObjectId, timestamp: Option[Long]) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: ObjectId) diff --git a/frontend/javascripts/admin/rest_api.ts b/frontend/javascripts/admin/rest_api.ts index 5b4459ef640..59ecc087af2 100644 --- a/frontend/javascripts/admin/rest_api.ts +++ b/frontend/javascripts/admin/rest_api.ts @@ -719,6 +719,12 @@ export async function acquireAnnotationMutex( return { canEdit, blockedByUser }; } +export async function releaseAnnotationMutex(annotationId: string): Promise<void> { + await Request.receiveJSON(`/api/annotations/${annotationId}/mutex`, { + method: "DELETE", + }); +} + export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, @@ -767,8 +773,9 @@ export function getUpdateActionLog( annotationId: string, oldestVersion?: number, newestVersion?: number, + sortAscending: boolean = false, ): Promise<Array<APIUpdateActionBatch>> { - return doWithToken((token) => { + return doWithToken(async (token) => { const params = new URLSearchParams(); params.set("token", token); if (oldestVersion != null) { @@ -777,9 +784,14 @@ export function getUpdateActionLog( if
(newestVersion != null) { params.set("newestVersion", newestVersion.toString()); } - return Request.receiveJSON( + const log: APIUpdateActionBatch[] = await Request.receiveJSON( `${tracingStoreUrl}/tracings/annotation/${annotationId}/updateActionLog?${params}`, ); + + if (sortAscending) { + log.reverse(); + } + return log; }); } @@ -1969,6 +1981,9 @@ export async function getAgglomeratesForSegmentsFromDatastore, ): Promise { + if (segmentIds.length === 0) { + return new Map(); + } const segmentIdBuffer = serializeProtoListOfLong(segmentIds); const listArrayBuffer: ArrayBuffer = await doWithToken((token) => { const params = new URLSearchParams({ token }); @@ -1999,6 +2014,9 @@ export async function getAgglomeratesForSegmentsFromTracingstore { + if (segmentIds.length === 0) { + return new Map(); + } const params = new URLSearchParams({ annotationId }); if (version != null) { params.set("version", version.toString()); @@ -2181,7 +2199,7 @@ export function getSynapseTypes( ); } -type MinCutTargetEdge = { +export type MinCutTargetEdge = { position1: Vector3; position2: Vector3; segmentId1: number; diff --git a/frontend/javascripts/libs/utils.ts b/frontend/javascripts/libs/utils.ts index 66b229b60c3..adec068bdae 100644 --- a/frontend/javascripts/libs/utils.ts +++ b/frontend/javascripts/libs/utils.ts @@ -1,18 +1,12 @@ +import { Chalk } from "chalk"; import dayjs from "dayjs"; import naturalSort from "javascript-natural-sort"; import window, { document, location } from "libs/window"; import _ from "lodash"; import type { APIDataset, APIUser, MapEntries } from "types/api_types"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; import type { ArbitraryObject, Comparator } from "types/globals"; -import type { - BoundingBoxType, - ColorObject, - Point3, - TypedArray, - Vector3, - Vector4, - Vector6, -} from "viewer/constants"; +import type { ColorObject, Point3, TypedArray, Vector3, Vector4, Vector6 } from "viewer/constants"; import type { TreeGroup } from 
"viewer/model/types/tree_types"; import type { BoundingBoxObject, NumberLike, SegmentGroup } from "viewer/store"; @@ -276,7 +270,7 @@ export function getRandomColor(): Vector3 { return randomColor as any as Vector3; } -export function computeBoundingBoxFromArray(bb: Vector6): BoundingBoxType { +export function computeBoundingBoxFromArray(bb: Vector6): BoundingBoxMinMaxType { const [x, y, z, width, height, depth] = bb; return { min: [x, y, z], @@ -284,11 +278,15 @@ export function computeBoundingBoxFromArray(bb: Vector6): BoundingBoxType { }; } -export function computeBoundingBoxFromBoundingBoxObject(bb: BoundingBoxObject): BoundingBoxType { +export function computeBoundingBoxFromBoundingBoxObject( + bb: BoundingBoxObject, +): BoundingBoxMinMaxType { return computeBoundingBoxFromArray([...bb.topLeft, bb.width, bb.height, bb.depth]); } -export function computeBoundingBoxObjectFromBoundingBox(bb: BoundingBoxType): BoundingBoxObject { +export function computeBoundingBoxObjectFromBoundingBox( + bb: BoundingBoxMinMaxType, +): BoundingBoxObject { const boundingBoxArray = computeArrayFromBoundingBox(bb); return { topLeft: [boundingBoxArray[0], boundingBoxArray[1], boundingBoxArray[2]], @@ -298,7 +296,7 @@ export function computeBoundingBoxObjectFromBoundingBox(bb: BoundingBoxType): Bo }; } -export function computeArrayFromBoundingBox(bb: BoundingBoxType): Vector6 { +export function computeArrayFromBoundingBox(bb: BoundingBoxMinMaxType): Vector6 { return [ bb.min[0], bb.min[1], @@ -309,11 +307,13 @@ export function computeArrayFromBoundingBox(bb: BoundingBoxType): Vector6 { ]; } -export function computeShapeFromBoundingBox(bb: BoundingBoxType): Vector3 { +export function computeShapeFromBoundingBox(bb: BoundingBoxMinMaxType): Vector3 { return [bb.max[0] - bb.min[0], bb.max[1] - bb.min[1], bb.max[2] - bb.min[2]]; } -export function aggregateBoundingBox(boundingBoxes: Array): BoundingBoxType { +export function aggregateBoundingBox( + boundingBoxes: Array, +): 
BoundingBoxMinMaxType { if (boundingBoxes.length === 0) { return { min: [0, 0, 0], @@ -344,8 +344,8 @@ export function aggregateBoundingBox(boundingBoxes: Array<BoundingBoxType>): B } export function areBoundingBoxesOverlappingOrTouching( - firstBB: BoundingBoxType, - secondBB: BoundingBoxType, + firstBB: BoundingBoxMinMaxType, + secondBB: BoundingBoxMinMaxType, ) { let areOverlapping = true; @@ -425,10 +425,6 @@ export function stringToNumberArray(s: string): Array<number> { return result; } -export function concatVector3(a: Vector3, b: Vector3): Vector6 { - return [a[0], a[1], a[2], b[0], b[1], b[2]]; -} - export function numberArrayToVector3(array: Array<number>): Vector3 { const output: Vector3 = [0, 0, 0]; @@ -1262,7 +1258,11 @@ export function notEmpty<TValue>(value: TValue | null | undefined): value is TVa export function isNumberMap(x: Map<NumberLike, NumberLike>): x is Map<number, number> { const { value } = x.entries().next(); - return Boolean(value && typeof value[0] === "number"); + if (value === undefined) { + // Let's assume a number map when the map is empty. + return true; + } + return Boolean(typeof value[0] === "number"); } export function isBigInt(x: NumberLike): x is bigint { @@ -1368,3 +1368,26 @@ export function areSetsEqual<T>(setA: Set<T>, setB: Set<T>) { } return true; } + +// ColoredLogger can be used to make certain log outputs easier to find (especially useful +// when automatic logging of redux actions is enabled which makes the overall logging +// very verbose).
+const chalk = new Chalk({ level: 3 }); +export const ColoredLogger = { + log: (...args: unknown[]) => { + // Simple wrapper to allow easy switching from colored to non-colored logs + console.log(...args); + }, + logRed: (str: string, ...args: unknown[]) => { + console.log(chalk.bgRed(str), ...args); + }, + logGreen: (str: string, ...args: unknown[]) => { + console.log(chalk.bgGreen(str), ...args); + }, + logYellow: (str: string, ...args: unknown[]) => { + console.log(chalk.bgYellow(str), ...args); + }, + logBlue: (str: string, ...args: unknown[]) => { + console.log(chalk.bgBlue(str), ...args); + }, +}; diff --git a/frontend/javascripts/libs/vector_input.tsx b/frontend/javascripts/libs/vector_input.tsx index e687e05f615..052ad7ac301 100644 --- a/frontend/javascripts/libs/vector_input.tsx +++ b/frontend/javascripts/libs/vector_input.tsx @@ -2,7 +2,7 @@ import type { InputProps } from "antd"; import * as Utils from "libs/utils"; import _ from "lodash"; import * as React from "react"; -import type { ServerBoundingBoxTypeTuple } from "types/api_types"; +import type { ServerBoundingBoxMinMaxTypeTuple } from "types/api_types"; import type { Vector3, Vector6 } from "viewer/constants"; import InputComponent from "viewer/view/components/input_component"; @@ -206,11 +206,11 @@ export class ArbitraryVectorInput extends BaseVector { } type BoundingBoxInputProps = Omit & { - value: ServerBoundingBoxTypeTuple; - onChange: (arg0: ServerBoundingBoxTypeTuple) => void; + value: ServerBoundingBoxMinMaxTypeTuple; + onChange: (arg0: ServerBoundingBoxMinMaxTypeTuple) => void; }; -function boundingBoxToVector6(value: ServerBoundingBoxTypeTuple): Vector6 { +function boundingBoxToVector6(value: ServerBoundingBoxMinMaxTypeTuple): Vector6 { const { topLeft, width, height, depth } = value; const [x, y, z] = topLeft; return [x, y, z, width, height, depth]; diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts 
b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 31b4532a504..6ae2c424434 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -11,14 +11,14 @@ import { createTreeMapFromTreeArray } from "viewer/model/reducers/skeletontracin import { diffTrees } from "viewer/model/sagas/skeletontracing_saga"; import { getNullableSkeletonTracing } from "viewer/model/accessors/skeletontracing_accessor"; import { getServerVolumeTracings } from "viewer/model/accessors/volumetracing_accessor"; -import { addVersionNumbers } from "viewer/model/sagas/save_saga"; -import * as UpdateActions from "viewer/model/sagas/update_actions"; +import * as UpdateActions from "viewer/model/sagas/volume/update_actions"; import * as api from "admin/rest_api"; import generateDummyTrees from "viewer/model/helpers/generate_dummy_trees"; import { describe, it, beforeAll, expect } from "vitest"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; import type { SaveQueueEntry } from "viewer/store"; import DiffableMap from "libs/diffable_map"; +import { addVersionNumbers } from "viewer/model/sagas/saving/save_queue_draining"; const datasetId = "59e9cfbdba632ac2ab8b23b3"; diff --git a/frontend/javascripts/test/fixtures/dataset_server_object.ts b/frontend/javascripts/test/fixtures/dataset_server_object.ts index 64d093cdfc1..b02b069b1ab 100644 --- a/frontend/javascripts/test/fixtures/dataset_server_object.ts +++ b/frontend/javascripts/test/fixtures/dataset_server_object.ts @@ -1,105 +1,125 @@ import { UnitLong } from "viewer/constants"; -import type { APIDataset } from "types/api_types"; +import type { APIColorLayer, APIDataset, APISegmentationLayer } from "types/api_types"; -const apiDataset: APIDataset = { - id: "66f3c82966010034942e9740", - name: "ROI2017_wkw", - dataSource: { - id: { - name: "ROI2017_wkw", - team: "Connectomics department", +const 
sampleColorLayer: APIColorLayer = { + name: "color", + category: "color", + boundingBox: { + topLeft: [0, 0, 0], + width: 10240, + height: 10240, + depth: 10240, + }, + resolutions: [ + [1, 1, 1], + [2, 2, 2], + [32, 32, 32], // unsorted on purpose + [4, 4, 4], + [8, 8, 8], + [16, 16, 16], + ], + elementClass: "uint8", + additionalAxes: [], +}; + +export const sampleHdf5AgglomerateName = "sampleHdf5Mapping"; +// this is a uint32 segmentation layer +const sampleSegmentationLayer: APISegmentationLayer = { + name: "segmentation", + category: "segmentation", + boundingBox: { + topLeft: [0, 0, 0], + width: 10240, + height: 10240, + depth: 10240, + }, + resolutions: [ + [1, 1, 1], + [2, 2, 2], + [32, 32, 32], // unsorted on purpose + [4, 4, 4], + [8, 8, 8], + [16, 16, 16], + ], + elementClass: "uint32", + largestSegmentId: 1000000000, + mappings: [ + "larger5um1", + "axons", + "astrocyte-ge-7", + "astrocyte", + "mitochondria", + "astrocyte-full", + ], + agglomerates: [sampleHdf5AgglomerateName], + tracingId: undefined, + additionalAxes: [], +}; + +// This is a segmentation layer object that could be directly +// inserted into the store. Do not use this object if it's intended +// to go through the normal model initialization because it does not +// have a fallback property. 
+export const sampleTracingLayer: APISegmentationLayer = { + ...sampleSegmentationLayer, + name: "volumeTracingId", + tracingId: "volumeTracingId", +}; + +function createDataset(dataLayers: Array): APIDataset { + return { + id: "66f3c82966010034942e9740", + name: "ROI2017_wkw", + dataSource: { + id: { + name: "ROI2017_wkw", + team: "Connectomics department", + }, + dataLayers, + scale: { factor: [11.239999771118164, 11.239999771118164, 28], unit: UnitLong.nm }, }, - dataLayers: [ + dataStore: { + name: "localhost", + url: "http://localhost:9000", + allowsUpload: true, + jobsSupportedByAvailableWorkers: [], + jobsEnabled: false, + }, + owningOrganization: "Connectomics department", + allowedTeams: [ { - name: "color", - category: "color", - boundingBox: { - topLeft: [0, 0, 0], - width: 10240, - height: 10240, - depth: 10240, - }, - resolutions: [ - [1, 1, 1], - [2, 2, 2], - [32, 32, 32], - [4, 4, 4], - [8, 8, 8], - [16, 16, 16], - ], - elementClass: "uint8", - additionalAxes: [], + id: "5b1e45f9a00000a000abc2c3", + name: "Connectomics department", + organization: "Connectomics department", }, + ], + allowedTeamsCumulative: [ { - name: "segmentation", - category: "segmentation", - boundingBox: { - topLeft: [0, 0, 0], - width: 10240, - height: 10240, - depth: 10240, - }, - resolutions: [ - [1, 1, 1], - [2, 2, 2], - [32, 32, 32], - [4, 4, 4], - [8, 8, 8], - [16, 16, 16], - ], - elementClass: "uint32", - largestSegmentId: 1000000000, - mappings: [ - "larger5um1", - "axons", - "astrocyte-ge-7", - "astrocyte", - "mitochondria", - "astrocyte-full", - ], - tracingId: undefined, - additionalAxes: [], + id: "5b1e45f9a00000a000abc2c3", + name: "Connectomics department", + organization: "Connectomics department", }, ], - scale: { factor: [11.239999771118164, 11.239999771118164, 28], unit: UnitLong.nm }, - }, - dataStore: { - name: "localhost", - url: "http://localhost:9000", - allowsUpload: true, - jobsSupportedByAvailableWorkers: [], - jobsEnabled: false, - }, - 
owningOrganization: "Connectomics department", - allowedTeams: [ - { - id: "5b1e45f9a00000a000abc2c3", - name: "Connectomics department", - organization: "Connectomics department", - }, - ], - allowedTeamsCumulative: [ - { - id: "5b1e45f9a00000a000abc2c3", - name: "Connectomics department", - organization: "Connectomics department", - }, - ], - isActive: true, - isPublic: false, - description: null, - created: 1502288550432, - isEditable: true, - directoryName: "ROI2017_wkw", - isUnreported: false, - tags: [], - folderId: "66f3c82466010002752e972c", - metadata: [], - logoUrl: "/assets/images/logo.svg", - lastUsedByUser: 1727268949322, - sortingKey: 1727252521746, - publication: null, - usedStorageBytes: 0, -}; + isActive: true, + isPublic: false, + description: null, + created: 1502288550432, + isEditable: true, + directoryName: "ROI2017_wkw", + isUnreported: false, + tags: [], + folderId: "66f3c82466010002752e972c", + metadata: [], + logoUrl: "/assets/images/logo.svg", + lastUsedByUser: 1727268949322, + sortingKey: 1727252521746, + publication: null, + usedStorageBytes: 0, + }; +} + +const apiDataset = createDataset([sampleColorLayer, sampleSegmentationLayer]); + +export const apiDatasetForVolumeTracing = createDataset([sampleSegmentationLayer]); export default apiDataset; diff --git a/frontend/javascripts/test/fixtures/dummy_organization.ts b/frontend/javascripts/test/fixtures/dummy_organization.ts index fd71ad4124e..34c84d7b05c 100644 --- a/frontend/javascripts/test/fixtures/dummy_organization.ts +++ b/frontend/javascripts/test/fixtures/dummy_organization.ts @@ -16,4 +16,19 @@ const dummyOrga: APIOrganization = { ownerName: undefined, }; +export const powerOrga: APIOrganization = { + id: "organizationId", + name: "Test Organization", + additionalInformation: "", + pricingPlan: PricingPlanEnum.Power, + enableAutoVerify: true, + newUserMailingList: "", + paidUntil: 0, + includedUsers: 1000, + includedStorageBytes: 10000, + usedStorageBytes: 0, + ownerName: 
undefined, + creditBalance: undefined, +}; + export default dummyOrga; diff --git a/frontend/javascripts/test/fixtures/hybridtracing_object.ts b/frontend/javascripts/test/fixtures/hybridtracing_object.ts index 263e4cc9549..df00a6dfcb8 100644 --- a/frontend/javascripts/test/fixtures/hybridtracing_object.ts +++ b/frontend/javascripts/test/fixtures/hybridtracing_object.ts @@ -1,7 +1,7 @@ import update from "immutability-helper"; import { TreeMap, type Tree } from "viewer/model/types/tree_types"; import type { SkeletonTracing } from "viewer/store"; -import { initialState as defaultState } from "test/fixtures/volumetracing_object"; +import { initialState as defaultVolumeState } from "test/fixtures/volumetracing_object"; import DiffableMap from "libs/diffable_map"; import EdgeCollection from "viewer/model/edge_collection"; @@ -9,7 +9,7 @@ import { MISSING_GROUP_ID } from "viewer/view/right-border-tabs/trees_tab/tree_h import { TreeTypeEnum } from "viewer/constants"; import type { APIColorLayer } from "types/api_types"; -const colorLayer: APIColorLayer = { +export const colorLayer: APIColorLayer = { name: "color", category: "color", boundingBox: { @@ -65,7 +65,7 @@ const initialTreeTwo: Tree = { export const initialSkeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, - tracingId: "tracingId", + tracingId: "skeletonTracingId", trees: new TreeMap([ [1, initialTreeOne], [2, initialTreeTwo], @@ -85,16 +85,19 @@ export const initialSkeletonTracing: SkeletonTracing = { additionalAxes: [], }; -export const initialState = update(defaultState, { +export const initialState = update(defaultVolumeState, { annotation: { skeleton: { $set: initialSkeletonTracing, }, + readOnly: { + $set: null, + }, }, dataset: { dataSource: { dataLayers: { - $set: [...defaultState.dataset.dataSource.dataLayers, colorLayer], + $set: [...defaultVolumeState.dataset.dataSource.dataLayers, colorLayer], }, }, }, diff --git 
a/frontend/javascripts/test/fixtures/hybridtracing_server_objects.ts b/frontend/javascripts/test/fixtures/hybridtracing_server_objects.ts new file mode 100644 index 00000000000..bba15e9ba9c --- /dev/null +++ b/frontend/javascripts/test/fixtures/hybridtracing_server_objects.ts @@ -0,0 +1,89 @@ +import { + type APIAnnotation, + AnnotationLayerEnum, + type APITracingStoreAnnotation, +} from "types/api_types"; +import { tracing as skeletonTracing } from "./skeletontracing_server_objects"; +import { tracing as volumeTracing } from "./volumetracing_server_objects"; +import { ViewModeValues } from "viewer/constants"; + +export const tracings = [skeletonTracing, volumeTracing]; + +export const annotation: APIAnnotation = { + description: "", + datasetId: "66f3c82966010034942e9740", + state: "Active", + id: "598b52293c00009906f043e7", + visibility: "Internal", + modified: 1529066010230, + name: "", + teams: [], + typ: "Explorational", + task: null, + restrictions: { + allowAccess: true, + allowUpdate: true, + allowFinish: true, + allowDownload: true, + allowSave: true, + }, + annotationLayers: [ + { + name: AnnotationLayerEnum.Skeleton, + tracingId: skeletonTracing.id, + typ: AnnotationLayerEnum.Skeleton, + stats: {}, + }, + { + name: AnnotationLayerEnum.Volume, + tracingId: volumeTracing.id, + typ: AnnotationLayerEnum.Volume, + stats: {}, + }, + ], + dataSetName: "ROI2017_wkw", + organization: "Connectomics Department", + dataStore: { + name: "localhost", + url: "http://localhost:9000", + allowsUpload: true, + jobsEnabled: false, + jobsSupportedByAvailableWorkers: [], + }, + tracingStore: { + name: "localhost", + url: "http://localhost:9000", + }, + settings: { + allowedModes: ViewModeValues, + branchPointsAllowed: true, + somaClickingAllowed: true, + volumeInterpolationAllowed: false, + mergerMode: false, + magRestrictions: {}, + }, + tags: ["ROI2017_wkw", "skeleton"], + tracingTime: 0, + contributors: [], + othersMayEdit: false, + isLockedByOwner: false, +}; + +export 
const annotationProto: APITracingStoreAnnotation = { + description: "hybrid-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: skeletonTracing.id, + name: "skeleton layer name", + typ: AnnotationLayerEnum.Skeleton, + }, + { + tracingId: volumeTracing.id, + name: "volume layer name", + typ: AnnotationLayerEnum.Volume, + }, + ], + userStates: [], +}; diff --git a/frontend/javascripts/test/fixtures/skeletontracing_object.ts b/frontend/javascripts/test/fixtures/skeletontracing_object.ts new file mode 100644 index 00000000000..559f08d0fcc --- /dev/null +++ b/frontend/javascripts/test/fixtures/skeletontracing_object.ts @@ -0,0 +1,21 @@ +import update from "immutability-helper"; +import { initialSkeletonTracing, colorLayer } from "./hybridtracing_object"; +import defaultState from "viewer/default_state"; + +export const initialState = update(defaultState, { + annotation: { + skeleton: { + $set: initialSkeletonTracing, + }, + readOnly: { + $set: null, + }, + }, + dataset: { + dataSource: { + dataLayers: { + $set: [colorLayer], + }, + }, + }, +}); diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 30ba7d81d3c..ac22411b542 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -4,6 +4,7 @@ import { AnnotationLayerEnum, type APITracingStoreAnnotation, } from "types/api_types"; +import { ViewModeValues } from "viewer/constants"; const TRACING_ID = "skeletonTracingId-47e37793-d0be-4240-a371-87ce68561a13"; @@ -205,7 +206,7 @@ export const annotation: APIAnnotation = { url: "http://localhost:9000", }, settings: { - allowedModes: ["orthogonal", "oblique", "flight"], + allowedModes: ViewModeValues, branchPointsAllowed: true, somaClickingAllowed: true, volumeInterpolationAllowed: false, diff --git 
a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 3cb558c379a..9d00df3ac9e 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -4,6 +4,7 @@ import { AnnotationLayerEnum, type APITracingStoreAnnotation, } from "types/api_types"; +import { ViewModeValues } from "viewer/constants"; const TRACING_ID = "skeletonTracingId-e90133de-b2db-4912-8261-8b6f84f7edab"; export const tracing: ServerSkeletonTracing = { @@ -88,7 +89,7 @@ export const annotation: APIAnnotation = { teamId: "teamId-5b1e45f9a00000a000abc2c3", teamName: "Connectomics department", settings: { - allowedModes: ["orthogonal", "oblique", "flight"], + allowedModes: ViewModeValues, branchPointsAllowed: true, somaClickingAllowed: true, volumeInterpolationAllowed: false, @@ -146,7 +147,7 @@ export const annotation: APIAnnotation = { }, visibility: "Internal", settings: { - allowedModes: ["orthogonal", "oblique", "flight"], + allowedModes: ViewModeValues, branchPointsAllowed: true, somaClickingAllowed: true, volumeInterpolationAllowed: false, diff --git a/frontend/javascripts/test/fixtures/volumetracing_object.ts b/frontend/javascripts/test/fixtures/volumetracing_object.ts index b0b515eb170..1709c6432e0 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_object.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_object.ts @@ -1,26 +1,26 @@ import update from "immutability-helper"; -import { AnnotationTool } from "viewer/model/accessors/tool_accessor"; import Constants from "viewer/constants"; import defaultState from "viewer/default_state"; +import { combinedReducer } from "viewer/store"; +import { setDatasetAction } from "viewer/model/actions/dataset_actions"; +import { convertFrontendBoundingBoxToServer } from "viewer/model/reducers/reducer_helpers"; +import { apiDatasetForVolumeTracing } from 
"./dataset_server_object"; +import { tracing as serverVolumeTracing } from "./volumetracing_server_objects"; +import { serverVolumeToClientVolumeTracing } from "viewer/model/reducers/volumetracing_reducer"; +import { preprocessDataset } from "viewer/model_initialization"; export const VOLUME_TRACING_ID = "volumeTracingId"; -const volumeTracing = { - type: "volume", - activeCellId: 0, - activeTool: AnnotationTool.MOVE, - largestSegmentId: 0, - contourList: [], - lastLabelActions: [], - tracingId: VOLUME_TRACING_ID, -}; +const volumeTracing = serverVolumeToClientVolumeTracing(serverVolumeTracing, null, null); + const notEmptyViewportRect = { top: 0, left: 0, width: Constants.VIEWPORT_WIDTH, height: Constants.VIEWPORT_WIDTH, }; -export const initialState = update(defaultState, { + +const stateWithoutDatasetInitialization = update(defaultState, { annotation: { annotationType: { $set: "Explorational", @@ -31,22 +31,25 @@ export const initialState = update(defaultState, { restrictions: { $set: { branchPointsAllowed: true, + initialAllowUpdate: true, allowUpdate: true, allowFinish: true, allowAccess: true, allowDownload: true, + allowedModes: [], + somaClickingAllowed: true, + volumeInterpolationAllowed: true, + mergerMode: false, magRestrictions: { - // @ts-expect-error ts-migrate(2322) FIXME: Type 'null' is not assignable to type 'number | un... Remove this comment to see the full error message - min: null, - // @ts-expect-error ts-migrate(2322) FIXME: Type 'null' is not assignable to type 'number | un... Remove this comment to see the full error message - max: null, + min: undefined, + max: undefined, }, }, }, volumes: { - // @ts-expect-error ts-migrate(2322) FIXME: Type '{ type: string; activeCellId: number; active... 
Remove this comment to see the full error message $set: [volumeTracing], }, + readOnly: { $set: null }, }, dataset: { dataSource: { @@ -61,12 +64,12 @@ export const initialState = update(defaultState, { [4, 4, 4], ], category: "segmentation", + largestSegmentId: volumeTracing.largestSegmentId ?? 0, elementClass: "uint32", name: volumeTracing.tracingId, tracingId: volumeTracing.tracingId, - // @ts-expect-error ts-migrate(2322) FIXME: Type '{ resolutions: [number, number, number][]; c... Remove this comment to see the full error message - isDisabled: false, - alpha: 100, + additionalAxes: [], + boundingBox: convertFrontendBoundingBoxToServer(volumeTracing.boundingBox!), }, ], }, @@ -108,3 +111,8 @@ export const initialState = update(defaultState, { }, }, }); + +export const initialState = combinedReducer( + stateWithoutDatasetInitialization, + setDatasetAction(preprocessDataset(apiDatasetForVolumeTracing, [serverVolumeTracing])), +); diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index dd38749c75e..e8b22c733b0 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -5,7 +5,9 @@ import { type APITracingStoreAnnotation, } from "types/api_types"; -const TRACING_ID = "volumeTracingId-1234"; +const TRACING_ID = "volumeTracingId"; + +// this is a uint16 segmentation layer export const tracing: ServerVolumeTracing = { typ: "Volume", activeSegmentId: 10000, @@ -36,43 +38,21 @@ export const tracing: ServerVolumeTracing = { }, additionalAxes: [], elementClass: "uint16", - id: "segmentation", + id: TRACING_ID, largestSegmentId: 21890, zoomLevel: 0, mags: [ - { - x: 1, - y: 1, - z: 1, - }, - { - x: 2, - y: 2, - z: 2, - }, - { - x: 4, - y: 4, - z: 4, - }, - { - x: 8, - y: 8, - z: 8, - }, - { - x: 16, - y: 16, - z: 16, - }, - { - x: 32, - y: 32, - z: 32, - }, + { x: 1, y: 1, z: 1 }, 
+ { x: 2, y: 2, z: 2 }, + { x: 4, y: 4, z: 4 }, + { x: 8, y: 8, z: 8 }, + { x: 16, y: 16, z: 16 }, + { x: 32, y: 32, z: 32 }, ], userStates: [], + fallbackLayer: "segmentation", }; + export const annotation: APIAnnotation = { datasetId: "66f3c82966010034942e9740", description: "", @@ -92,7 +72,7 @@ export const annotation: APIAnnotation = { }, annotationLayers: [ { - name: "volume", + name: "some volume name", tracingId: TRACING_ID, typ: AnnotationLayerEnum.Volume, stats: {}, @@ -125,6 +105,7 @@ export const annotation: APIAnnotation = { othersMayEdit: false, isLockedByOwner: false, }; + export const annotationProto: APITracingStoreAnnotation = { description: "volume-annotation-description", version: 1, @@ -132,7 +113,7 @@ export const annotationProto: APITracingStoreAnnotation = { annotationLayers: [ { tracingId: TRACING_ID, - name: "volume", + name: "some volume name", typ: AnnotationLayerEnum.Volume, }, ], diff --git a/frontend/javascripts/test/global_mocks.ts b/frontend/javascripts/test/global_mocks.ts index 46e400848e5..e41c47d4630 100644 --- a/frontend/javascripts/test/global_mocks.ts +++ b/frontend/javascripts/test/global_mocks.ts @@ -16,8 +16,8 @@ vi.mock("libs/keyboard", () => ({ vi.mock("libs/toast", () => ({ default: { - error: vi.fn(), - warning: vi.fn(), + error: vi.fn((msg) => console.error(msg)), + warning: vi.fn((msg) => console.warn(msg)), close: vi.fn(), success: vi.fn(), info: vi.fn(), @@ -123,10 +123,12 @@ vi.mock("antd", () => { Dropdown: {}, message: { hide: vi.fn(), + destroy: vi.fn(), // These return a "hide function" show: vi.fn(() => () => {}), loading: vi.fn(() => () => {}), success: vi.fn(() => () => {}), + error: vi.fn(() => () => {}), }, Modal: { confirm: vi.fn(), diff --git a/frontend/javascripts/test/helpers/apiHelpers.ts b/frontend/javascripts/test/helpers/apiHelpers.ts index e6545b3bd99..c7eec41556c 100644 --- a/frontend/javascripts/test/helpers/apiHelpers.ts +++ b/frontend/javascripts/test/helpers/apiHelpers.ts @@ -1,6 +1,6 @@ 
import { vi, type TestContext as BaseTestContext } from "vitest"; import _ from "lodash"; -import { ControlModeEnum } from "viewer/constants"; +import Constants, { ControlModeEnum, type Vector2 } from "viewer/constants"; import { sleep } from "libs/utils"; import dummyUser from "test/fixtures/dummy_user"; import dummyOrga from "test/fixtures/dummy_organization"; @@ -20,7 +20,7 @@ import { annotation as VOLUME_ANNOTATION, annotationProto as VOLUME_ANNOTATION_PROTO, } from "../fixtures/volumetracing_server_objects"; -import DATASET from "../fixtures/dataset_server_object"; +import DATASET, { sampleHdf5AgglomerateName } from "../fixtures/dataset_server_object"; import type { ApiInterface } from "viewer/api/api_latest"; import type { ModelType } from "viewer/model"; @@ -29,7 +29,7 @@ import Model from "viewer/model"; import UrlManager from "viewer/controller/url_manager"; import WebknossosApi from "viewer/api/api_loader"; -import { type SaveQueueEntry, default as Store, startSaga } from "viewer/store"; +import { type NumberLike, type SaveQueueEntry, default as Store, startSaga } from "viewer/store"; import rootSaga from "viewer/model/sagas/root_saga"; import { setStore, setModel } from "viewer/singletons"; import { setupApi } from "viewer/api/internal_api"; @@ -37,9 +37,20 @@ import { setActiveOrganizationAction } from "viewer/model/actions/organization_a import Request, { type RequestOptions } from "libs/request"; import { parseProtoAnnotation, parseProtoTracing } from "viewer/model/helpers/proto_helpers"; import app from "app"; -import { sendSaveRequestWithToken } from "admin/rest_api"; +import { + getDataset, + getEdgesForAgglomerateMinCut, + sendSaveRequestWithToken, + type MinCutTargetEdge, +} from "admin/rest_api"; import { resetStoreAction, restartSagaAction, wkReadyAction } from "viewer/model/actions/actions"; import { setActiveUserAction } from "viewer/model/actions/user_actions"; +import { + tracings as HYBRID_TRACINGS, + annotation as HYBRID_ANNOTATION, + 
annotationProto as HYBRID_ANNOTATION_PROTO, +} from "test/fixtures/hybridtracing_server_objects"; +import type { ServerTracing } from "types/api_types"; const TOKEN = "secure-token"; const ANNOTATION_TYPE = "annotationTypeValue"; @@ -50,6 +61,8 @@ export interface WebknossosTestContext extends BaseTestContext { model: ModelType; mocks: { Request: typeof Request; + getCurrentMappingEntriesFromServer: typeof getCurrentMappingEntriesFromServer; + getEdgesForAgglomerateMinCut: typeof getEdgesForAgglomerateMinCut; }; setSlowCompression: (enabled: boolean) => void; api: ApiInterface; @@ -71,6 +84,10 @@ vi.mock("libs/request", () => ({ }, })); +const getCurrentMappingEntriesFromServer = vi.fn((): Array<[number, number]> => { + return []; +}); + vi.mock("admin/rest_api.ts", async () => { const actual = await vi.importActual("admin/rest_api.ts"); @@ -80,9 +97,62 @@ vi.mock("admin/rest_api.ts", async () => { return Promise.resolve(); }); (mockedSendRequestWithToken as any).receivedDataPerSaveRequest = receivedDataPerSaveRequest; + + const getAgglomeratesForSegmentsImpl = async (segmentIds: Array) => { + const segmentIdSet = new Set(segmentIds); + const entries = getCurrentMappingEntriesFromServer().filter(([id]) => + segmentIdSet.has(id), + ) as Vector2[]; + if (entries.length < segmentIdSet.size) { + throw new Error( + "Incorrect mock implementation of getAgglomeratesForSegmentsImpl detected. 
The requested segment ids were not properly served.", + ); + } + return new Map(entries); + }; + const getAgglomeratesForSegmentsFromDatastoreMock = vi.fn( + ( + _dataStoreUrl: string, + _dataSourceId: unknown, + _layerName: string, + _mappingId: string, + segmentIds: Array, + ) => { + return getAgglomeratesForSegmentsImpl(segmentIds); + }, + ); + + const getAgglomeratesForSegmentsFromTracingstoreMock = vi.fn( + ( + _tracingStoreUrl: string, + _tracingId: string, + segmentIds: Array, + _annotationId: string, + _version?: number | null | undefined, + ) => { + return getAgglomeratesForSegmentsImpl(segmentIds); + }, + ); + return { ...actual, + getDataset: vi.fn(), sendSaveRequestWithToken: mockedSendRequestWithToken, + getAgglomeratesForDatasetLayer: vi.fn(() => [sampleHdf5AgglomerateName]), + getAgglomeratesForSegmentsFromTracingstore: getAgglomeratesForSegmentsFromTracingstoreMock, + getAgglomeratesForSegmentsFromDatastore: getAgglomeratesForSegmentsFromDatastoreMock, + getEdgesForAgglomerateMinCut: vi.fn( + ( + _tracingStoreUrl: string, + _tracingId: string, + _segmentsInfo: unknown, + ): Promise> => { + // This simply serves as a preparation so that specs can mock the function + // when needed. Without this stub, it's harder to mock this specific function + // later. 
+ throw new Error("No test has mocked the return value yet here."); + }, + ), }; }); @@ -116,10 +186,6 @@ function receiveJSONMockImplementation( return Promise.resolve({}); } - if (url === `/api/datasets/${annotationFixture.datasetId}`) { - return Promise.resolve(_.cloneDeep(DATASET)); - } - if (url === "/api/userToken/generate" && options && options.method === "POST") { return Promise.resolve({ token: TOKEN, @@ -147,13 +213,36 @@ vi.mock("viewer/model/bucket_data_handling/data_rendering_logic", async (importO }; }); -export function createBucketResponseFunction(TypedArrayClass: any, fillValue: number, delay = 0) { +type Override = { + position: [number, number, number]; // [x, y, z] + value: number; +}; + +export function createBucketResponseFunction( + TypedArrayClass: any, + fillValue: number, + delay = 0, + overrides: Override[] = [], +) { return async function getBucketData(_url: string, payload: { data: Array }) { - const bucketCount = payload.data.length; await sleep(delay); + const bucketCount = payload.data.length; + const typedArray = new TypedArrayClass(bucketCount * 32 ** 3).fill(fillValue); + + for (let bucketIdx = 0; bucketIdx < bucketCount; bucketIdx++) { + for (const { position, value } of overrides) { + const [x, y, z] = position; + const indexInBucket = + bucketIdx * Constants.BUCKET_WIDTH ** 3 + + z * Constants.BUCKET_WIDTH ** 2 + + y * Constants.BUCKET_WIDTH + + x; + typedArray[indexInBucket] = value; + } + } + return { - buffer: new Uint8Array(new TypedArrayClass(bucketCount * 32 ** 3).fill(fillValue).buffer) - .buffer, + buffer: new Uint8Array(typedArray.buffer).buffer, headers: { "missing-buckets": "[]", }, @@ -171,17 +260,26 @@ vi.mock("libs/keyboard", () => ({ const modelData = { skeleton: { - tracing: SKELETON_TRACING, + dataset: DATASET, + tracings: [SKELETON_TRACING], annotation: SKELETON_ANNOTATION, annotationProto: SKELETON_ANNOTATION_PROTO, }, volume: { - tracing: VOLUME_TRACING, + dataset: DATASET, + tracings: [VOLUME_TRACING], 
annotation: VOLUME_ANNOTATION, annotationProto: VOLUME_ANNOTATION_PROTO, }, + hybrid: { + dataset: DATASET, + tracings: HYBRID_TRACINGS, + annotation: HYBRID_ANNOTATION, + annotationProto: HYBRID_ANNOTATION_PROTO, + }, task: { - tracing: TASK_TRACING, + dataset: DATASET, + tracings: [TASK_TRACING], annotation: TASK_ANNOTATION, annotationProto: TASK_ANNOTATION_PROTO, }, @@ -211,7 +309,9 @@ export async function setupWebknossosForTesting( testContext.model = Model; testContext.mocks = { - Request, + Request: vi.mocked(Request), + getCurrentMappingEntriesFromServer, + getEdgesForAgglomerateMinCut, }; testContext.setSlowCompression = setSlowCompression; testContext.tearDownPullQueues = () => @@ -223,14 +323,32 @@ export async function setupWebknossosForTesting( ).receivedDataPerSaveRequest; const webknossos = new WebknossosApi(Model); - const annotationFixture = modelData[mode].annotation; + const { tracings, annotationProto, dataset, annotation } = modelData[mode]; vi.mocked(Request).receiveJSON.mockImplementation((url, options) => - receiveJSONMockImplementation(url, options, annotationFixture), + receiveJSONMockImplementation(url, options, annotation), ); - vi.mocked(parseProtoTracing).mockReturnValue(_.cloneDeep(modelData[mode].tracing)); - vi.mocked(parseProtoAnnotation).mockReturnValue(_.cloneDeep(modelData[mode].annotationProto)); + vi.mocked(getDataset).mockImplementation( + async ( + _datasetId: string, + _sharingToken?: string | null | undefined, + _options: RequestOptions = {}, + ) => { + return _.cloneDeep(dataset); + }, + ); + + vi.mocked(parseProtoTracing).mockImplementation( + (_buffer: ArrayBuffer, annotationType: "skeleton" | "volume"): ServerTracing => { + const tracing = tracings.find((tracing) => tracing.typ.toLowerCase() === annotationType); + if (tracing == null) { + throw new Error(`Could not find tracing for ${annotationType}.`); + } + return tracing; + }, + ); + vi.mocked(parseProtoAnnotation).mockReturnValue(_.cloneDeep(annotationProto)); 
setSceneController({ name: "This is a dummy scene controller so that getSceneController works in the tests.", diff --git a/frontend/javascripts/test/helpers/chainReducer.ts b/frontend/javascripts/test/helpers/chainReducer.ts index 3269fbe5f31..892a70392ba 100644 --- a/frontend/javascripts/test/helpers/chainReducer.ts +++ b/frontend/javascripts/test/helpers/chainReducer.ts @@ -1,9 +1,16 @@ -export function chainReduce(reducer: (arg0: S, arg1: A) => S) { - return (state: S, actionGetters: Array A)>) => { +type ReducerFn = (s: State, arg1: Action) => State; + +export function chainReduce(reducer: ReducerFn) { + /* + * Given a reducer, chainReduce returns a function which accepts a state and + * an array of actions (or action getters). When invoked, that function will + * use the reducer to apply all actions on the initial state. + */ + return (state: State, actionGetters: Array Action)>) => { return actionGetters.reduce((currentState, actionGetter) => { - const action: A = + const action: Action = typeof actionGetter === "function" - ? (actionGetter as (state: S) => A)(currentState) + ? 
(actionGetter as (state: State) => Action)(currentState) : actionGetter; return reducer(currentState, action); }, state); diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 3ad197c7a23..f234f070e84 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -1,5 +1,5 @@ import type { TracingStats } from "viewer/model/accessors/annotation_accessor"; -import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/volume/update_actions"; import type { SaveQueueEntry } from "viewer/store"; import { idUserA } from "test/e2e-setup"; import dummyUser from "test/fixtures/dummy_user"; @@ -23,15 +23,11 @@ export function createSaveQueueFromUpdateActions( })); } -export function withoutUpdateTracing( +export function withoutUpdateActiveItemTracing( items: UpdateActionWithoutIsolationRequirement[], ): UpdateActionWithoutIsolationRequirement[] { return items.filter( - (item) => - item.name !== "updateSkeletonTracing" && - item.name !== "updateVolumeTracing" && - item.name !== "updateActiveNode" && - item.name !== "updateActiveSegmentId", + (item) => item.name !== "updateActiveNode" && item.name !== "updateActiveSegmentId", ); } diff --git a/frontend/javascripts/test/helpers/utils.ts b/frontend/javascripts/test/helpers/utils.ts new file mode 100644 index 00000000000..636831a910b --- /dev/null +++ b/frontend/javascripts/test/helpers/utils.ts @@ -0,0 +1,29 @@ +import update from "immutability-helper"; +import type { WebknossosState } from "viewer/store"; + +export const transformStateAsReadOnly = ( + state: WebknossosState, + transformFn: (state: WebknossosState) => WebknossosState, +) => { + /* + * This function can be used to make a state read only before + * transforming it somehow (e.g., with a reducer). 
The result of + * the transformation is then made not-read-only again. + */ + const readOnlyState = overrideAllowUpdateInState(state, false); + const transformedState = transformFn(readOnlyState); + + return overrideAllowUpdateInState(transformedState, true); +}; + +function overrideAllowUpdateInState(state: WebknossosState, value: boolean) { + return update(state, { + annotation: { + restrictions: { + allowUpdate: { + $set: value, + }, + }, + }, + }); +} diff --git a/frontend/javascripts/test/libs/nml.spec.ts b/frontend/javascripts/test/libs/nml.spec.ts index bedd8113a09..391cb4cbaef 100644 --- a/frontend/javascripts/test/libs/nml.spec.ts +++ b/frontend/javascripts/test/libs/nml.spec.ts @@ -31,7 +31,7 @@ const createDummyNode = (id: number): Node => ({ const initialSkeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, - tracingId: "tracingId", + tracingId: "skeletonTracingId", cachedMaxNodeId: 7, trees: new DiffableMap([ [ @@ -769,7 +769,7 @@ describe("NML", () => { expect(newSkeletonTracing.trees.getOrThrow(4).nodes.size()).toBe(3); expect(newSkeletonTracing.trees.getOrThrow(4).nodes.getOrThrow(12).id).toBe(12); - const getSortedEdges = (edges: EdgeCollection) => _.sortBy(edges.asArray(), "source"); + const getSortedEdges = (edges: EdgeCollection) => _.sortBy(edges.toArray(), "source"); // And node ids in edges, branchpoints and comments should have been replaced expect(getSortedEdges(newSkeletonTracing.trees.getOrThrow(3).edges)).toEqual([ diff --git a/frontend/javascripts/test/model/cuckoo_table.spec.ts b/frontend/javascripts/test/model/cuckoo_table.spec.ts index 8d1f6a1feb8..bd1e9521cb2 100644 --- a/frontend/javascripts/test/model/cuckoo_table.spec.ts +++ b/frontend/javascripts/test/model/cuckoo_table.spec.ts @@ -106,7 +106,7 @@ describe("CuckooTableVec3", () => { }).toThrow(); }); - it("Maxing out capacity", { timeout: 20000 }, () => { + it("Maxing out capacity", { timeout: 25000 }, () => { const textureWidth = 128; const attemptCount = 
10; for (let attempt = 0; attempt < attemptCount; attempt++) { diff --git a/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts b/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts index c74a2e9992e..5997bf30cd5 100644 --- a/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts +++ b/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts @@ -27,7 +27,7 @@ function isValueEqual(val1: Value, val2: Value | null) { } describe("CuckooTableUint32", () => { - it("Maxing out capacity", { timeout: 20000 }, () => { + it("Maxing out capacity", { timeout: 25000 }, () => { const textureWidth = 128; const attemptCount = 10; diff --git a/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts b/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts index 1a53df53593..5510609a5ae 100644 --- a/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts +++ b/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts @@ -29,7 +29,7 @@ function isValueEqual(val1: Value, val2: Value | null) { } describe("CuckooTableUint64", () => { - it("Maxing out capacity", { timeout: 20000 }, () => { + it("Maxing out capacity", { timeout: 25000 }, () => { const textureWidth = 128; const attemptCount = 10; for (let attempt = 0; attempt < attemptCount; attempt++) { diff --git a/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts b/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts index 76d17df5665..7fa9885e27a 100644 --- a/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts +++ b/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts @@ -116,7 +116,7 @@ describe("CuckooTableVec5", () => { }).toThrow(); }); - it("Maxing out capacity", { timeout: 20000 }, () => { + it("Maxing out capacity", { timeout: 25000 }, () => { const textureWidth = 128; const attemptCount = 10; for (let attempt = 0; attempt < attemptCount; attempt++) { diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts 
b/frontend/javascripts/test/reducers/save_reducer.spec.ts index de9a5cb22d6..f5d1a2f2226 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -2,11 +2,11 @@ import { vi, describe, it, expect } from "vitest"; import dummyUser from "test/fixtures/dummy_user"; import type { WebknossosState } from "viewer/store"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/volume/update_actions"; import * as SaveActions from "viewer/model/actions/save_actions"; import SaveReducer from "viewer/model/reducers/save_reducer"; -import { createEdge } from "viewer/model/sagas/update_actions"; +import { createEdge } from "viewer/model/sagas/volume/update_actions"; import { TIMESTAMP } from "test/global_mocks"; vi.mock("viewer/model/accessors/annotation_accessor", () => ({ diff --git a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts index 6d95071500e..2356c8b0ea1 100644 --- a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts @@ -22,6 +22,7 @@ import { type Tree, MutableTreeMap, } from "viewer/model/types/tree_types"; +import { deleteNodeAsUserAction } from "viewer/model/actions/skeletontracing_actions_with_effects"; const initialState: WebknossosState = update(defaultState, { annotation: { @@ -112,7 +113,7 @@ describe("SkeletonTracing", () => { expect(newSkeletonTracing.activeNodeId).toBe(3); expect(newSkeletonTracing.trees.getOrThrow(1).nodes.size()).toEqual(3); expect(newSkeletonTracing.trees.getOrThrow(1).edges.size()).toEqual(2); - expect(newSkeletonTracing.trees.getOrThrow(1).edges.asArray()).toEqual([ + 
expect(newSkeletonTracing.trees.getOrThrow(1).edges.toArray()).toEqual([ { source: 1, target: 2, @@ -152,7 +153,7 @@ describe("SkeletonTracing", () => { expect(newSkeletonTracing.trees.getOrThrow(2).nodes.size()).toEqual(0); expect(newSkeletonTracing.trees.getOrThrow(3).nodes.size()).toEqual(2); expect(newSkeletonTracing.trees.getOrThrow(2).edges.size()).toEqual(0); - expect(newSkeletonTracing.trees.getOrThrow(3).edges.asArray()).toEqual([ + expect(newSkeletonTracing.trees.getOrThrow(3).edges.toArray()).toEqual([ { source: 2, target: 3, @@ -170,7 +171,7 @@ describe("SkeletonTracing", () => { }); it("should delete the tree if 'delete node as user' is initiated for an empty tree", () => { - const { createTreeAction, deleteNodeAsUserAction } = SkeletonTracingActions; + const { createTreeAction } = SkeletonTracingActions; const newState = applyActions(initialStateWithActiveTreeId2, [ createTreeAction(), (currentState: WebknossosState) => deleteNodeAsUserAction(currentState), @@ -1023,7 +1024,7 @@ describe("SkeletonTracing", () => { const newSkeletonTracing = enforceSkeletonTracing(newState.annotation); expect(newSkeletonTracing.trees.size()).toBe(2); expect(newSkeletonTracing.trees.getOrThrow(3).nodes.size()).toBe(4); - expect(newSkeletonTracing.trees.getOrThrow(3).edges.asArray()).toEqual([ + expect(newSkeletonTracing.trees.getOrThrow(3).edges.toArray()).toEqual([ { source: 2, target: 3, @@ -1089,7 +1090,7 @@ describe("SkeletonTracing", () => { const newSkeletonTracing = enforceSkeletonTracing(newState.annotation); expect(newSkeletonTracing.trees.size()).toBe(2); expect(newSkeletonTracing.trees.getOrThrow(3).nodes.size()).toBe(4); - expect(newSkeletonTracing.trees.getOrThrow(3).edges.asArray()).toEqual([ + expect(newSkeletonTracing.trees.getOrThrow(3).edges.toArray()).toEqual([ { source: 2, target: 3, diff --git a/frontend/javascripts/test/reducers/update_action_application/skeleton.spec.ts 
b/frontend/javascripts/test/reducers/update_action_application/skeleton.spec.ts new file mode 100644 index 00000000000..45c75fcd9b0 --- /dev/null +++ b/frontend/javascripts/test/reducers/update_action_application/skeleton.spec.ts @@ -0,0 +1,306 @@ +import update from "immutability-helper"; +import _ from "lodash"; +import { sampleTracingLayer } from "test/fixtures/dataset_server_object"; +import { initialState as defaultSkeletonState } from "test/fixtures/skeletontracing_object"; +import { chainReduce } from "test/helpers/chainReducer"; +import { withoutUpdateActiveItemTracing } from "test/helpers/saveHelpers"; +import { transformStateAsReadOnly } from "test/helpers/utils"; +import type { Vector3 } from "viewer/constants"; +import { + enforceSkeletonTracing, + getActiveNode, + getActiveTree, +} from "viewer/model/accessors/skeletontracing_accessor"; +import type { Action } from "viewer/model/actions/actions"; +import { + addUserBoundingBoxAction, + changeUserBoundingBoxAction, + deleteUserBoundingBoxAction, +} from "viewer/model/actions/annotation_actions"; +import * as SkeletonTracingActions from "viewer/model/actions/skeletontracing_actions"; +import { setActiveUserBoundingBoxId } from "viewer/model/actions/ui_actions"; +import compactUpdateActions from "viewer/model/helpers/compaction/compact_update_actions"; +import { diffSkeletonTracing } from "viewer/model/sagas/skeletontracing_saga"; +import type { + ApplicableSkeletonUpdateAction, + UpdateActionWithoutIsolationRequirement, +} from "viewer/model/sagas/volume/update_actions"; +import { combinedReducer, type WebknossosState } from "viewer/store"; +import { makeBasicGroupObject } from "viewer/view/right-border-tabs/trees_tab/tree_hierarchy_view_helpers"; +import { describe, expect, test, it, afterAll } from "vitest"; + +const initialState: WebknossosState = update(defaultSkeletonState, { + annotation: { + restrictions: { + allowUpdate: { + $set: true, + }, + branchPointsAllowed: { + $set: true, + }, + }, + 
annotationType: { $set: "Explorational" }, + }, + dataset: { + dataSource: { + dataLayers: { + $set: [sampleTracingLayer], + }, + }, + }, +}); + +const position = [10, 10, 10] as Vector3; +const rotation = [0.5, 0.5, 0.5] as Vector3; +const viewport = 0; +const mag = 0; + +const applyActions = chainReduce(combinedReducer); + +// This helper dict exists so that we can ensure via typescript that +// the list contains all members of ApplicableSkeletonUpdateAction. As soon as +// ApplicableSkeletonUpdateAction is extended with another action, TS will complain +// if the following dictionary doesn't contain that action. +const actionNamesHelper: Record = { + updateTree: true, + createTree: true, + updateNode: true, + createNode: true, + createEdge: true, + deleteTree: true, + deleteEdge: true, + deleteNode: true, + moveTreeComponent: true, + updateTreeGroups: true, + updateTreeGroupsExpandedState: true, + updateTreeEdgesVisibility: true, + addUserBoundingBoxInSkeletonTracing: true, + updateUserBoundingBoxInSkeletonTracing: true, + updateUserBoundingBoxVisibilityInSkeletonTracing: true, + deleteUserBoundingBoxInSkeletonTracing: true, +}; +const actionNamesList = Object.keys(actionNamesHelper); + +describe("Update Action Application for SkeletonTracing", () => { + const seenActionTypes = new Set(); + + let idx = 0; + const createNode = () => + SkeletonTracingActions.createNodeAction([10, 10, idx++], null, rotation, viewport, mag); + + /* + * Hardcode these values if you want to focus on a specific test. + */ + const compactionModes = [true, false]; + const hardcodedBeforeVersionIndex: number | null = null; + const hardcodedAfterVersionIndex: number | null = null; + + const userActions: Action[] = [ + SkeletonTracingActions.deleteTreeAction(2), // delete second tree. one tree remains. 
+ createNode(), // nodeId=1 + createNode(), // nodeId=2 + createNode(), // nodeId=3 + createNode(), // nodeId=4 + createNode(), // nodeId=5 + SkeletonTracingActions.deleteNodeAction(3), // tree components == {1,2} {4,5} + SkeletonTracingActions.createTreeAction(), + createNode(), // nodeId=6 + createNode(), // nodeId=7 + createNode(), // nodeId=8, tree components == {1,2} {4,5} {6,7,8} + SkeletonTracingActions.setTreeNameAction("Special Name", 1), + SkeletonTracingActions.setActiveNodeAction(null), + SkeletonTracingActions.mergeTreesAction(5, 7), // tree components {1,2} {4,5,6,7,8} + SkeletonTracingActions.setActiveNodeAction(2), + createNode(), // nodeId=9, tree components {1,2,9} {4,5,6,7,8} + SkeletonTracingActions.setActiveNodeAction(2), + createNode(), // nodeId=10, tree components {1,2,9,10} {4,5,6,7,8} + SkeletonTracingActions.setActiveNodeAction(1), + createNode(), // nodeId=11, tree components {11,1,2,9,10} {4,5,6,7,8} + SkeletonTracingActions.deleteEdgeAction(1, 2), // tree components {11,1} {2,9,10} {4,5,6,7,8} + SkeletonTracingActions.createTreeAction(), + createNode(), // nodeId=12 + createNode(), // nodeId=13 + createNode(), // nodeId=14, tree components == {1,2} {4,5} {6,7,8} {12,13,14} + SkeletonTracingActions.deleteTreeAction(3), + SkeletonTracingActions.setNodePositionAction([1, 2, 3], 6), + addUserBoundingBoxAction({ + boundingBox: { min: [0, 0, 0], max: [10, 10, 10] }, + name: "UserBBox", + color: [1, 2, 3], + isVisible: true, + }), + changeUserBoundingBoxAction(1, { name: "Updated Name" }), + deleteUserBoundingBoxAction(1), + SkeletonTracingActions.setTreeGroupsAction([ + makeBasicGroupObject(3, "group 3"), + makeBasicGroupObject(7, "group 7"), + ]), + SkeletonTracingActions.setTreeGroupAction(7, 2), + SkeletonTracingActions.setTreeEdgeVisibilityAction(2, false), + ]; + + test("User actions for test should not contain no-ops", () => { + let state = initialState; + for (const action of userActions) { + const newState = combinedReducer(state, 
action); + expect(newState !== state).toBeTruthy(); + + state = newState; + } + }); + + const beforeVersionIndices = + hardcodedBeforeVersionIndex != null + ? [hardcodedBeforeVersionIndex] + : _.range(0, userActions.length); + + describe.each(compactionModes)( + "[Compaction=%s]: should re-apply update actions from complex diff and get same state", + (withCompaction) => { + describe.each(beforeVersionIndices)("From v=%i", (beforeVersionIndex: number) => { + const afterVersionIndices = + hardcodedAfterVersionIndex != null + ? [hardcodedAfterVersionIndex] + : _.range(beforeVersionIndex, userActions.length + 1); + + test.each(afterVersionIndices)("To v=%i", (afterVersionIndex: number) => { + const state2WithActiveTree = applyActions( + initialState, + userActions.slice(0, beforeVersionIndex), + ); + + const state2WithoutActiveState = applyActions(state2WithActiveTree, [ + SkeletonTracingActions.setActiveNodeAction(null), + setActiveUserBoundingBoxId(null), + ]); + + const actionsToApply = userActions.slice(beforeVersionIndex, afterVersionIndex + 1); + const state3 = applyActions( + state2WithActiveTree, + actionsToApply.concat([ + SkeletonTracingActions.setActiveNodeAction(null), + setActiveUserBoundingBoxId(null), + ]), + ); + expect(state2WithoutActiveState !== state3).toBeTruthy(); + + const skeletonTracing2 = enforceSkeletonTracing(state2WithoutActiveState.annotation); + const skeletonTracing3 = enforceSkeletonTracing(state3.annotation); + + const updateActionsBeforeCompaction = Array.from( + diffSkeletonTracing(skeletonTracing2, skeletonTracing3), + ); + const maybeCompact = withCompaction + ? 
compactUpdateActions + : (updateActions: UpdateActionWithoutIsolationRequirement[]) => updateActions; + const updateActions = maybeCompact( + updateActionsBeforeCompaction, + skeletonTracing2, + skeletonTracing3, + ) as ApplicableSkeletonUpdateAction[]; + + for (const action of updateActions) { + seenActionTypes.add(action.name); + } + + const reappliedNewState = transformStateAsReadOnly(state2WithoutActiveState, (state) => + applyActions(state, [ + SkeletonTracingActions.applySkeletonUpdateActionsFromServerAction(updateActions), + SkeletonTracingActions.setActiveNodeAction(null), + setActiveUserBoundingBoxId(null), + ]), + ); + + expect(reappliedNewState).toEqual(state3); + }); + }); + }, + ); + + it("should clear the active node if it was deleted", () => { + const createNode = SkeletonTracingActions.createNodeAction( + position, + null, + rotation, + viewport, + mag, + ); + const newState = applyActions(initialState, [ + createNode, // nodeId=1 + createNode, // nodeId=2 + SkeletonTracingActions.setActiveNodeAction(2), + ]); + expect(getActiveNode(enforceSkeletonTracing(newState.annotation))?.id).toBe(2); + + const newState2 = applyActions(newState, [SkeletonTracingActions.deleteNodeAction(2)]); + + const updateActions = withoutUpdateActiveItemTracing( + Array.from( + diffSkeletonTracing(newState.annotation.skeleton!, newState2.annotation.skeleton!), + ), + ) as ApplicableSkeletonUpdateAction[]; + + const newState3 = transformStateAsReadOnly(newState, (state) => + applyActions(state, [ + SkeletonTracingActions.applySkeletonUpdateActionsFromServerAction(updateActions), + ]), + ); + + const { activeNodeId } = enforceSkeletonTracing(newState3.annotation); + expect(activeNodeId).toBeNull(); + }); + + it("should clear the active node and active tree if the active tree was deleted", () => { + const createNode = SkeletonTracingActions.createNodeAction( + position, + null, + rotation, + viewport, + mag, + ); + const newState = applyActions(initialState, [ + createNode, // 
nodeId=1 + createNode, // nodeId=2 + SkeletonTracingActions.setActiveTreeAction(2), + ]); + expect(getActiveTree(enforceSkeletonTracing(newState.annotation))?.treeId).toBe(2); + + const newState2 = applyActions(newState, [SkeletonTracingActions.deleteTreeAction(2)]); + + const updateActions = withoutUpdateActiveItemTracing( + Array.from( + diffSkeletonTracing(newState.annotation.skeleton!, newState2.annotation.skeleton!), + ), + ) as ApplicableSkeletonUpdateAction[]; + + const newState3 = transformStateAsReadOnly(newState, (state) => + applyActions(state, [ + SkeletonTracingActions.applySkeletonUpdateActionsFromServerAction(updateActions), + ]), + ); + + const { activeTreeId, activeNodeId } = enforceSkeletonTracing(newState3.annotation); + + expect(activeNodeId).toBeNull(); + expect(activeTreeId).toBeNull(); + }); + + afterAll(() => { + // Ensure that each possible action is included in the testing at least once + expect(seenActionTypes).toEqual(new Set(actionNamesList)); + }); +}); + +function _debugLogTrees(prefix: string, state: WebknossosState) { + const size = state.annotation.skeleton!.trees.getOrThrow(1).nodes.size(); + console.log("logTrees. size", size); + for (const tree of state.annotation.skeleton!.trees.values()) { + console.log( + `${prefix}. 
tree.id=${tree.treeId}.`, + "edges: ", + Array.from(tree.edges.values().map((edge) => `${edge.source}-${edge.target}`)).join(", "), + "nodes: ", + Array.from(tree.nodes.values().map((n) => n.id)).join(", "), + ); + } +} diff --git a/frontend/javascripts/test/reducers/update_action_application/volume.spec.ts b/frontend/javascripts/test/reducers/update_action_application/volume.spec.ts new file mode 100644 index 00000000000..4a2ebcda193 --- /dev/null +++ b/frontend/javascripts/test/reducers/update_action_application/volume.spec.ts @@ -0,0 +1,200 @@ +import update from "immutability-helper"; +import _ from "lodash"; +import { sampleTracingLayer } from "test/fixtures/dataset_server_object"; +import { initialState as defaultVolumeState } from "test/fixtures/volumetracing_object"; +import { chainReduce } from "test/helpers/chainReducer"; +import type { Action } from "viewer/model/actions/actions"; +import { + addUserBoundingBoxAction, + changeUserBoundingBoxAction, + deleteUserBoundingBoxAction, +} from "viewer/model/actions/annotation_actions"; +import * as VolumeTracingActions from "viewer/model/actions/volumetracing_actions"; +import { setActiveUserBoundingBoxId } from "viewer/model/actions/ui_actions"; +import compactUpdateActions from "viewer/model/helpers/compaction/compact_update_actions"; +import { diffVolumeTracing } from "viewer/model/sagas/volumetracing_saga"; +import type { + ApplicableVolumeUpdateAction, + UpdateActionWithoutIsolationRequirement, +} from "viewer/model/sagas/volume/update_actions"; +import { combinedReducer, type WebknossosState } from "viewer/store"; +import { makeBasicGroupObject } from "viewer/view/right-border-tabs/trees_tab/tree_hierarchy_view_helpers"; +import { afterAll, describe, expect, test } from "vitest"; +import { transformStateAsReadOnly } from "test/helpers/utils"; + +const enforceVolumeTracing = (state: WebknossosState) => { + const tracing = state.annotation.volumes[0]; + if (tracing == null || state.annotation.volumes.length 
!== 1) { + throw new Error("No volume tracing found"); + } + return tracing; +}; + +const initialState: WebknossosState = update(defaultVolumeState, { + annotation: { + restrictions: { + allowUpdate: { + $set: true, + }, + branchPointsAllowed: { + $set: true, + }, + }, + annotationType: { $set: "Explorational" }, + }, + dataset: { + dataSource: { + dataLayers: { + $set: [sampleTracingLayer], + }, + }, + }, +}); + +const { tracingId } = initialState.annotation.volumes[0]; + +const applyActions = chainReduce(combinedReducer); + +// This helper dict exists so that we can ensure via typescript that +// the list contains all members of ApplicableVolumeUpdateAction. As soon as +// ApplicableVolumeUpdateAction is extended with another action, TS will complain +// if the following dictionary doesn't contain that action. +const actionNamesHelper: Record = { + updateLargestSegmentId: true, + updateSegment: true, + createSegment: true, + deleteSegment: true, + updateSegmentGroups: true, + addUserBoundingBoxInVolumeTracing: true, + updateUserBoundingBoxInVolumeTracing: true, + deleteUserBoundingBoxInVolumeTracing: true, + updateSegmentGroupsExpandedState: true, + updateUserBoundingBoxVisibilityInVolumeTracing: true, +}; +const actionNamesList = Object.keys(actionNamesHelper); + +describe("Update Action Application for VolumeTracing", () => { + const seenActionTypes = new Set(); + + /* + * Hardcode these values if you want to focus on a specific test. 
+ */ + const compactionModes = [true, false]; + const hardcodedBeforeVersionIndex: number | null = null; + const hardcodedAfterVersionIndex: number | null = null; + + const userActions: Action[] = [ + VolumeTracingActions.updateSegmentAction(2, { somePosition: [1, 2, 3] }, tracingId), + VolumeTracingActions.updateSegmentAction(3, { somePosition: [3, 4, 5] }, tracingId), + VolumeTracingActions.updateSegmentAction( + 3, + { + name: "name", + groupId: 3, + metadata: [ + { + key: "someKey", + stringValue: "some string value", + }, + ], + }, + tracingId, + ), + addUserBoundingBoxAction({ + boundingBox: { min: [0, 0, 0], max: [10, 10, 10] }, + name: "UserBBox", + color: [1, 2, 3], + isVisible: true, + }), + changeUserBoundingBoxAction(1, { name: "Updated Name" }), + deleteUserBoundingBoxAction(1), + VolumeTracingActions.setSegmentGroupsAction( + [makeBasicGroupObject(3, "group 3"), makeBasicGroupObject(7, "group 7")], + tracingId, + ), + VolumeTracingActions.removeSegmentAction(3, tracingId), + VolumeTracingActions.setLargestSegmentIdAction(10000), + ]; + + test("User actions for test should not contain no-ops", () => { + let state = initialState; + for (const action of userActions) { + const newState = combinedReducer(state, action); + expect(newState !== state).toBeTruthy(); + + state = newState; + } + }); + + const beforeVersionIndices = + hardcodedBeforeVersionIndex != null + ? [hardcodedBeforeVersionIndex] + : _.range(0, userActions.length); + + describe.each(compactionModes)( + "[Compaction=%s]: should re-apply update actions from complex diff and get same state", + (withCompaction) => { + describe.each(beforeVersionIndices)("From v=%i", (beforeVersionIndex: number) => { + const afterVersionIndices = + hardcodedAfterVersionIndex != null + ? 
[hardcodedAfterVersionIndex] + : _.range(beforeVersionIndex, userActions.length + 1); + + test.each(afterVersionIndices)("To v=%i", (afterVersionIndex: number) => { + const state2WithActiveTree = applyActions( + initialState, + userActions.slice(0, beforeVersionIndex), + ); + + const state2WithoutActiveState = applyActions(state2WithActiveTree, [ + VolumeTracingActions.setActiveCellAction(0), + setActiveUserBoundingBoxId(null), + ]); + + const actionsToApply = userActions.slice(beforeVersionIndex, afterVersionIndex + 1); + const state3 = applyActions( + state2WithActiveTree, + actionsToApply.concat([ + VolumeTracingActions.setActiveCellAction(0), + setActiveUserBoundingBoxId(null), + ]), + ); + expect(state2WithoutActiveState !== state3).toBeTruthy(); + + const volumeTracing2 = enforceVolumeTracing(state2WithoutActiveState); + const volumeTracing3 = enforceVolumeTracing(state3); + + const updateActionsBeforeCompaction = Array.from( + diffVolumeTracing(volumeTracing2, volumeTracing3), + ); + const maybeCompact = withCompaction + ? 
compactUpdateActions + : (updateActions: UpdateActionWithoutIsolationRequirement[]) => updateActions; + const updateActions = maybeCompact( + updateActionsBeforeCompaction, + volumeTracing2, + volumeTracing3, + ) as ApplicableVolumeUpdateAction[]; + + for (const action of updateActions) { + seenActionTypes.add(action.name); + } + + const reappliedNewState = transformStateAsReadOnly(state2WithoutActiveState, (state) => + applyActions(state, [ + VolumeTracingActions.applyVolumeUpdateActionsFromServerAction(updateActions), + VolumeTracingActions.setActiveCellAction(0), + setActiveUserBoundingBoxId(null), + ]), + ); + + expect(reappliedNewState).toEqual(state3); + }); + }); + }, + ); + + afterAll(() => { + expect(seenActionTypes).toEqual(new Set(actionNamesList)); + }); +}); diff --git a/frontend/javascripts/test/reducers/volumetracing_reducer.spec.ts b/frontend/javascripts/test/reducers/volumetracing_reducer.spec.ts index b400d47bffb..a2f0386b86c 100644 --- a/frontend/javascripts/test/reducers/volumetracing_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/volumetracing_reducer.spec.ts @@ -19,6 +19,8 @@ export function getFirstVolumeTracingOrFail(annotation: StoreAnnotation): Volume throw new Error("Annotation is not of type volume!"); } +const INITIAL_LARGEST_SEGMENT_ID = initialState.annotation.volumes[0].largestSegmentId ?? 
0; + describe("VolumeTracing", () => { it("should set a new active cell", () => { const createCellAction = VolumeTracingActions.createCellAction(1000, 1000); @@ -66,7 +68,7 @@ describe("VolumeTracing", () => { // Create cell const newState = VolumeTracingReducer(initialState, createCellAction); const tracing = getFirstVolumeTracingOrFail(newState.annotation); - expect(tracing.activeCellId).toBe(1); + expect(tracing.activeCellId).toBe(INITIAL_LARGEST_SEGMENT_ID + 1); }); it("should create a non-existing cell id and not update the largestSegmentId", () => { @@ -79,7 +81,7 @@ describe("VolumeTracing", () => { const newState = VolumeTracingReducer(initialState, createCellAction); const tracing = getFirstVolumeTracingOrFail(newState.annotation); - expect(tracing.largestSegmentId).toBe(0); + expect(tracing.largestSegmentId).toBe(INITIAL_LARGEST_SEGMENT_ID); }); it("should create an existing cell and not update the largestSegmentId", () => { @@ -121,6 +123,9 @@ describe("VolumeTracing", () => { largestSegmentId: { $set: LARGEST_SEGMENT_ID, }, + activeCellId: { + $set: LARGEST_SEGMENT_ID, + }, }, }, }, diff --git a/frontend/javascripts/test/sagas/annotation_saga.spec.ts b/frontend/javascripts/test/sagas/annotation_saga.spec.ts index 142a9f32eb5..a90f3f0894e 100644 --- a/frontend/javascripts/test/sagas/annotation_saga.spec.ts +++ b/frontend/javascripts/test/sagas/annotation_saga.spec.ts @@ -12,7 +12,7 @@ import { } from "viewer/model/actions/annotation_actions"; import { ensureWkReady } from "viewer/model/sagas/ready_sagas"; import { wkReadyAction } from "viewer/model/actions/actions"; -import { acquireAnnotationMutexMaybe } from "viewer/model/sagas/annotation_saga"; +import { acquireAnnotationMutexMaybe } from "viewer/model/sagas/saving/save_mutex_saga"; const createInitialState = ( othersMayEdit: boolean, diff --git a/frontend/javascripts/test/sagas/bounding_box_saving.spec.ts b/frontend/javascripts/test/sagas/bounding_box_saving.spec.ts index 7225562dfb2..c9f640eb038 
100644 --- a/frontend/javascripts/test/sagas/bounding_box_saving.spec.ts +++ b/frontend/javascripts/test/sagas/bounding_box_saving.spec.ts @@ -6,7 +6,7 @@ import { type UpdateUserBoundingBoxInSkeletonTracingAction, updateUserBoundingBoxInVolumeTracing, type UpdateUserBoundingBoxInVolumeTracingAction, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import type { SaveQueueEntry, UserBoundingBox } from "viewer/store"; import { describe, expect, it } from "vitest"; diff --git a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts index 514fe6f6c6b..9ee1e408fed 100644 --- a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts +++ b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts @@ -7,10 +7,10 @@ import { updateSegmentVisibilityVolumeAction, updateTreeGroupVisibility, updateTreeVisibility, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import { withoutUpdateSegment, - withoutUpdateTracing, + withoutUpdateActiveItemTracing, withoutUpdateTree, } from "test/helpers/saveHelpers"; import DiffableMap from "libs/diffable_map"; @@ -162,7 +162,7 @@ function testSkeletonDiffing(prevState: WebknossosState, nextState: WebknossosSt // are creating completely new trees, so that we don't have to go through the // action->reducer pipeline) return withoutUpdateTree( - withoutUpdateTracing( + withoutUpdateActiveItemTracing( Array.from( diffSkeletonTracing( enforceSkeletonTracing(prevState.annotation), @@ -179,7 +179,7 @@ function testVolumeDiffing(prevState: WebknossosState, nextState: WebknossosStat // are creating completely new trees, so that we don't have to go through the // action->reducer pipeline) return withoutUpdateSegment( - withoutUpdateTracing( + withoutUpdateActiveItemTracing( Array.from( diffVolumeTracing(prevState.annotation.volumes[0], nextState.annotation.volumes[0]), ), 
diff --git a/frontend/javascripts/test/sagas/proofreading.spec.ts b/frontend/javascripts/test/sagas/proofreading.spec.ts new file mode 100644 index 00000000000..93ed28f1740 --- /dev/null +++ b/frontend/javascripts/test/sagas/proofreading.spec.ts @@ -0,0 +1,371 @@ +import type { Saga } from "viewer/model/sagas/effect-generators"; +import { call, put, select, take } from "redux-saga/effects"; +import { sampleHdf5AgglomerateName } from "test/fixtures/dataset_server_object"; +import { powerOrga } from "test/fixtures/dummy_organization"; +import { + createBucketResponseFunction, + setupWebknossosForTesting, + type WebknossosTestContext, +} from "test/helpers/apiHelpers"; +import { getMappingInfo } from "viewer/model/accessors/dataset_accessor"; +import { getCurrentMag } from "viewer/model/accessors/flycam_accessor"; +import { AnnotationTool } from "viewer/model/accessors/tool_accessor"; +import { setZoomStepAction } from "viewer/model/actions/flycam_actions"; +import { setActiveOrganizationAction } from "viewer/model/actions/organization_actions"; +import { + proofreadMergeAction, + minCutAgglomerateWithPositionAction, +} from "viewer/model/actions/proofread_actions"; +import { setMappingAction } from "viewer/model/actions/settings_actions"; +import { setToolAction } from "viewer/model/actions/ui_actions"; +import { + setActiveCellAction, + updateSegmentAction, +} from "viewer/model/actions/volumetracing_actions"; +import { hasRootSagaCrashed } from "viewer/model/sagas/root_saga"; +import { Store } from "viewer/singletons"; +import { startSaga } from "viewer/store"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { tryToIncorporateActions } from "viewer/model/sagas/saving/save_saga"; + +function* initializeMappingAndTool(context: WebknossosTestContext, tracingId: string): Saga { + const { api } = context; + // Set up organization with power plan (necessary for proofreading) + // and zoom in so that buckets in mag 1, 1, 1 are loaded. 
+ yield put(setActiveOrganizationAction(powerOrga)); + yield put(setZoomStepAction(0.3)); + const currentMag = yield select((state) => getCurrentMag(state, tracingId)); + expect(currentMag).toEqual([1, 1, 1]); + + // Activate agglomerate mapping and wait for finished mapping initialization + // (unfortunately, that action is dispatched twice; once for the activation and once + // for the changed BucketRetrievalSource). Ideally, this should be refactored away. + yield put(setMappingAction(tracingId, sampleHdf5AgglomerateName, "HDF5")); + yield take("FINISH_MAPPING_INITIALIZATION"); + + yield take("FINISH_MAPPING_INITIALIZATION"); + + // Activate the proofread tool. WK will reload the bucket data and apply the mapping + // locally (acknowledged by FINISH_MAPPING_INITIALIZATION). + yield put(setToolAction(AnnotationTool.PROOFREAD)); + yield take("FINISH_MAPPING_INITIALIZATION"); + + // Read data from the 0,0,0 bucket so that it is in memory (important because the mapping + // is only maintained for loaded buckets). + const valueAt444 = yield call(() => api.data.getDataValue(tracingId, [4, 4, 4], 0)); + expect(valueAt444).toBe(4); + // Once again, we wait for FINISH_MAPPING_INITIALIZATION because the mapping is updated + // for the keys that are found in the newly loaded bucket. 
+ yield take("FINISH_MAPPING_INITIALIZATION"); +} + +function mockInitialBucketAndAgglomerateData(context: WebknossosTestContext) { + const { mocks } = context; + vi.mocked(mocks.Request).sendJSONReceiveArraybufferWithHeaders.mockImplementation( + createBucketResponseFunction(Uint16Array, 1, 5, [ + { position: [0, 0, 0], value: 1337 }, + { position: [1, 1, 1], value: 1 }, + { position: [2, 2, 2], value: 2 }, + { position: [3, 3, 3], value: 3 }, + { position: [4, 4, 4], value: 4 }, + { position: [5, 5, 5], value: 5 }, + { position: [6, 6, 6], value: 6 }, + { position: [7, 7, 7], value: 7 }, + ]), + ); + mocks.getCurrentMappingEntriesFromServer.mockReturnValue([ + [1, 10], + [2, 10], + [3, 10], + [4, 11], + [5, 11], + [6, 12], + [7, 12], + [1337, 1337], + ]); +} + +const initialMapping = new Map([ + [1, 10], + [2, 10], + [3, 10], + [4, 11], + [5, 11], + [6, 12], + [7, 12], + [1337, 1337], +]); + +const expectedMappingAfterMerge = new Map([ + [1, 10], + [2, 10], + [3, 10], + [4, 10], + [5, 10], + [6, 12], + [7, 12], + [1337, 1337], +]); + +const expectedMappingAfterSplit = new Map([ + [1, 9], + [2, 10], + [3, 10], + [4, 11], + [5, 11], + [6, 12], + [7, 12], + [1337, 1337], +]); + +describe("Proofreading", () => { + beforeEach(async (context) => { + await setupWebknossosForTesting(context, "hybrid"); + }); + + afterEach(async (context) => { + context.tearDownPullQueues(); + // Saving after each test and checking that the root saga didn't crash, + expect(hasRootSagaCrashed()).toBe(false); + }); + + it("should merge two agglomerates and update the mapping accordingly", async (context: WebknossosTestContext) => { + const { api } = context; + mockInitialBucketAndAgglomerateData(context); + + const { annotation } = Store.getState(); + const { tracingId } = annotation.volumes[0]; + + const task = startSaga(function* () { + yield call(initializeMappingAndTool, context, tracingId); + const mapping0 = yield select( + (state) => + 
getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + expect(mapping0).toEqual(initialMapping); + + // Set up the merge-related segment partners. Normally, this would happen + // due to the user's interactions. + yield put(updateSegmentAction(1, { somePosition: [1, 1, 1] }, tracingId)); + yield put(setActiveCellAction(1)); + + // Execute the actual merge and wait for the finished mapping. + yield put(proofreadMergeAction([4, 4, 4], 1, 4)); + yield take("FINISH_MAPPING_INITIALIZATION"); + + const mapping = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + + expect(mapping).toEqual(expectedMappingAfterMerge); + + yield call(() => api.tracing.save()); + + const mergeSaveActionBatch = context.receivedDataPerSaveRequest.at(-1)![0]?.actions; + + expect(mergeSaveActionBatch).toEqual([ + { + name: "mergeAgglomerate", + value: { + actionTracingId: "volumeTracingId", + agglomerateId1: 10, + agglomerateId2: 11, + segmentId1: 1, + segmentId2: 4, + mag: [1, 1, 1], + }, + }, + ]); + }); + + await task.toPromise(); + }, 8000); + + it("should split two agglomerates and update the mapping accordingly", async (context: WebknossosTestContext) => { + const { api, mocks } = context; + mockInitialBucketAndAgglomerateData(context); + + const { annotation } = Store.getState(); + const { tracingId } = annotation.volumes[0]; + + const task = startSaga(function* () { + yield call(initializeMappingAndTool, context, tracingId); + + const mapping0 = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + expect(mapping0).toEqual(initialMapping); + + // Set up the merge-related segment partners. Normally, this would happen + // due to the user's interactions. 
+ yield put(updateSegmentAction(1, { somePosition: [1, 1, 1] }, tracingId)); + yield put(setActiveCellAction(1)); + + // Prepare the server's reply for the upcoming split. + vi.mocked(mocks.getEdgesForAgglomerateMinCut).mockReturnValue( + Promise.resolve([ + { + position1: [1, 1, 1], + position2: [2, 2, 2], + segmentId1: 1, + segmentId2: 2, + }, + ]), + ); + // Already prepare the server's reply for mapping requests that will be sent + // after the split. + mocks.getCurrentMappingEntriesFromServer.mockReturnValue([ + [1, 9], + [2, 10], + [3, 10], + ]); + + // Execute the split and wait for the finished mapping. + yield put(minCutAgglomerateWithPositionAction([2, 2, 2], 2, 10)); + yield take("FINISH_MAPPING_INITIALIZATION"); + + const mapping1 = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + + expect(mapping1).toEqual(expectedMappingAfterSplit); + + yield call(() => api.tracing.save()); + + const mergeSaveActionBatch = context.receivedDataPerSaveRequest.at(-1)![0]?.actions; + + expect(mergeSaveActionBatch).toEqual([ + { + name: "splitAgglomerate", + value: { + actionTracingId: "volumeTracingId", + agglomerateId: 10, + segmentId1: 1, + segmentId2: 2, + mag: [1, 1, 1], + }, + }, + ]); + }); + + await task.toPromise(); + }, 8000); + + it("should update the mapping when the server has a new update action with a merge operation", async (context: WebknossosTestContext) => { + const { api } = context; + mockInitialBucketAndAgglomerateData(context); + + const { annotation } = Store.getState(); + const { tracingId } = annotation.volumes[0]; + + const task = startSaga(function* () { + yield call(initializeMappingAndTool, context, tracingId); + + const mapping0 = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + expect(mapping0).toEqual(initialMapping); + yield call(() => api.tracing.save()); + context.receivedDataPerSaveRequest = 
[]; + + yield call(tryToIncorporateActions, [ + { + version: 1, + value: [ + { + name: "mergeAgglomerate", + value: { + actionTracingId: "volumeTracingId", + actionTimestamp: 0, + agglomerateId1: 10, + agglomerateId2: 11, + segmentId1: 1, + segmentId2: 4, + mag: [1, 1, 1], + }, + }, + ], + }, + ]); + + yield take("FINISH_MAPPING_INITIALIZATION"); + + const mapping1 = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + + expect(mapping1).toEqual(expectedMappingAfterMerge); + + yield call(() => api.tracing.save()); + + expect(context.receivedDataPerSaveRequest).toEqual([]); + }); + + await task.toPromise(); + }, 8000); + + it("should update the mapping when the server has a new update action with a split operation", async (context: WebknossosTestContext) => { + const { api, mocks } = context; + mockInitialBucketAndAgglomerateData(context); + + const { annotation } = Store.getState(); + const { tracingId } = annotation.volumes[0]; + + const task = startSaga(function* () { + yield call(initializeMappingAndTool, context, tracingId); + + const mapping0 = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + expect(mapping0).toEqual(initialMapping); + yield call(() => api.tracing.save()); + context.receivedDataPerSaveRequest = []; + + // Already prepare the server's reply for mapping requests that will be sent + // after the split. 
+ mocks.getCurrentMappingEntriesFromServer.mockReturnValue([ + [1, 9], + [2, 10], + [3, 10], + ]); + + yield call(tryToIncorporateActions, [ + { + version: 1, + value: [ + { + name: "splitAgglomerate", + value: { + actionTracingId: "volumeTracingId", + actionTimestamp: 0, + agglomerateId: 10, + segmentId1: 1, + segmentId2: 2, + mag: [1, 1, 1], + }, + }, + ], + }, + ]); + + const mapping1 = yield select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId).mapping, + ); + + expect(mapping1).toEqual(expectedMappingAfterSplit); + + yield call(() => api.tracing.save()); + + expect(context.receivedDataPerSaveRequest).toEqual([]); + }); + + await task.toPromise(); + }, 8000); +}); diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 10c0378ae22..2d7d714bfff 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -3,7 +3,7 @@ import { setupWebknossosForTesting, type WebknossosTestContext } from "test/help import { createSaveQueueFromUpdateActions } from "test/helpers/saveHelpers"; import { enforceSkeletonTracing } from "viewer/model/accessors/skeletontracing_accessor"; import { getStats } from "viewer/model/accessors/annotation_accessor"; -import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "viewer/model/sagas/save_saga_constants"; +import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "viewer/model/sagas/saving/save_saga_constants"; import Store from "viewer/store"; import generateDummyTrees from "viewer/model/helpers/generate_dummy_trees"; import { hasRootSagaCrashed } from "viewer/model/sagas/root_saga"; @@ -19,7 +19,7 @@ import { deleteNodeAction, } from "viewer/model/actions/skeletontracing_actions"; import { discardSaveQueuesAction } from "viewer/model/actions/save_actions"; -import * as UpdateActions from "viewer/model/sagas/update_actions"; +import * as UpdateActions from 
"viewer/model/sagas/volume/update_actions"; import { TIMESTAMP } from "test/global_mocks"; describe("Saga Integration Tests", () => { diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index baab497d417..b3460f89180 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -10,13 +10,13 @@ import { UnitLong } from "viewer/constants"; import { put, take, call } from "redux-saga/effects"; import * as SaveActions from "viewer/model/actions/save_actions"; -import * as UpdateActions from "viewer/model/sagas/update_actions"; +import * as UpdateActions from "viewer/model/sagas/volume/update_actions"; import { pushSaveQueueAsync, sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, -} from "viewer/model/sagas/save_saga"; +} from "viewer/model/sagas/saving/save_queue_draining"; import { TIMESTAMP } from "test/global_mocks"; import { sendSaveRequestWithToken } from "admin/rest_api"; diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index ac8124e8afd..3e983bb3cd8 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -2,7 +2,6 @@ import { setupWebknossosForTesting, type WebknossosTestContext } from "test/help import type { SkeletonTracing, StoreAnnotation } from "viewer/store"; import { describe, it, expect, beforeEach, afterEach } from "vitest"; import Store from "viewer/store"; -import { hasRootSagaCrashed } from "viewer/model/sagas/root_saga"; import { chainReduce } from "test/helpers/chainReducer"; import DiffableMap from "libs/diffable_map"; @@ -11,11 +10,14 @@ import compactSaveQueue from "viewer/model/helpers/compaction/compact_save_queue import compactUpdateActions from "viewer/model/helpers/compaction/compact_update_actions"; import defaultState from 
"viewer/default_state"; import update from "immutability-helper"; -import { createSaveQueueFromUpdateActions, withoutUpdateTracing } from "../helpers/saveHelpers"; +import { + createSaveQueueFromUpdateActions, + withoutUpdateActiveItemTracing, +} from "../helpers/saveHelpers"; import { MISSING_GROUP_ID } from "viewer/view/right-border-tabs/trees_tab/tree_hierarchy_view_helpers"; import { TreeTypeEnum } from "viewer/constants"; import { enforceSkeletonTracing } from "viewer/model/accessors/skeletontracing_accessor"; -import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/volume/update_actions"; import type { TracingStats } from "viewer/model/accessors/annotation_accessor"; import { diffSkeletonTracing } from "viewer/model/sagas/skeletontracing_saga"; import * as SkeletonTracingActions from "viewer/model/actions/skeletontracing_actions"; @@ -23,11 +25,12 @@ import SkeletonTracingReducer from "viewer/model/reducers/skeletontracing_reduce import { TIMESTAMP } from "test/global_mocks"; import { type Tree, TreeMap } from "viewer/model/types/tree_types"; import { Model } from "viewer/singletons"; +import { hasRootSagaCrashed } from "viewer/model/sagas/root_saga"; -const actionTracingId = "tracingId"; +const actionTracingId = "skeletonTracingId"; function testDiffing(prevAnnotation: StoreAnnotation, nextAnnotation: StoreAnnotation) { - return withoutUpdateTracing( + return withoutUpdateActiveItemTracing( Array.from( diffSkeletonTracing( enforceSkeletonTracing(prevAnnotation), @@ -40,12 +43,13 @@ function testDiffing(prevAnnotation: StoreAnnotation, nextAnnotation: StoreAnnot function createCompactedSaveQueueFromUpdateActions( updateActions: UpdateActionWithoutIsolationRequirement[][], timestamp: number, + prevTracing: SkeletonTracing, tracing: SkeletonTracing, stats: TracingStats | null = null, ) { return compactSaveQueue( 
createSaveQueueFromUpdateActions( - updateActions.map((batch) => compactUpdateActions(batch, tracing)), + updateActions.map((batch) => compactUpdateActions(batch, prevTracing, tracing)), timestamp, stats, ), @@ -71,7 +75,7 @@ const skeletonTreeOne: Tree = { const skeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, - tracingId: "tracingId", + tracingId: "skeletonTracingId", trees: new TreeMap([[1, skeletonTreeOne]]), treeGroups: [], activeGroupId: null, @@ -134,7 +138,6 @@ describe("SkeletonTracingSaga", () => { context.tearDownPullQueues(); // Saving after each test and checking that the root saga didn't crash, // ensures that each test is cleanly exited. Without it weird output can - // occur (e.g., a promise gets resolved which interferes with the next test). expect(hasRootSagaCrashed()).toBe(false); }); @@ -567,7 +570,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; @@ -630,7 +634,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( updateActions, TIMESTAMP, - skeletonTracing, + newState1.annotation.skeleton!, + newState2.annotation.skeleton!, ); // This should result in one created node and its edge (a) @@ -696,11 +701,11 @@ describe("SkeletonTracingSaga", () => { // Create three nodes in the first tree, then create a second tree with one node const testState = applyActions(initialState, [ - createNodeAction, - createNodeAction, - createNodeAction, + createNodeAction, // nodeId=1 + createNodeAction, // nodeId=2 + createNodeAction, // nodeId=3 createTreeAction, - createNodeAction, + createNodeAction, // nodeId=4 ]); // Merge the second tree into the first tree (a) @@ -711,8 +716,8 @@ describe("SkeletonTracingSaga", () => { 
// Create another tree and two nodes (b) const newState = applyActions(stateAfterFirstMerge, [ createTreeAction, - createNodeAction, - createNodeAction, + createNodeAction, // nodeId=5 + createNodeAction, // nodeId=6 ]); updateActions.push(testDiffing(stateAfterFirstMerge.annotation, newState.annotation)); @@ -725,7 +730,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( updateActions, TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); // This should result in a moved treeComponent of size one (a) @@ -814,7 +820,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); // This should result in a new tree @@ -873,7 +880,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); // This should result in two new trees and two moved treeComponents of size three and two @@ -932,29 +940,35 @@ describe("SkeletonTracingSaga", () => { // Create six nodes const testState = applyActions(initialState, [ - createNodeAction, - createNodeAction, - createNodeAction, - createNodeAction, - createNodeAction, - createNodeAction, + createNodeAction, // nodeId=1 + createNodeAction, // nodeId=2 <-- will be deleted + createNodeAction, // nodeId=3 + createNodeAction, // nodeId=4 <-- will be deleted + createNodeAction, // nodeId=5 + createNodeAction, // nodeId=6 ]); // Delete the second node to split the tree (a) const newState1 = SkeletonTracingReducer(testState, deleteMiddleNodeAction); - const updateActions = []; - updateActions.push(testDiffing(testState.annotation, newState1.annotation)); + const updateActions1 = 
[testDiffing(testState.annotation, newState1.annotation)]; + const simplifiedUpdateActions1 = createCompactedSaveQueueFromUpdateActions( + updateActions1, + TIMESTAMP, + testState.annotation.skeleton!, + newState1.annotation.skeleton!, + ); // Delete node 4 to split the tree again (b) const newState2 = SkeletonTracingReducer(newState1, deleteOtherMiddleNodeAction); - updateActions.push(testDiffing(newState1.annotation, newState2.annotation)); - const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( - updateActions, + const updateActions2 = [testDiffing(newState1.annotation, newState2.annotation)]; + const simplifiedUpdateActions2 = createCompactedSaveQueueFromUpdateActions( + updateActions2, TIMESTAMP, - skeletonTracing, + newState1.annotation.skeleton!, + newState2.annotation.skeleton!, ); // This should result in the creation of a new tree (a) - const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; + const simplifiedFirstBatch = simplifiedUpdateActions1[0].actions; expect(simplifiedFirstBatch[0]).toMatchObject({ name: "createTree", value: { @@ -984,9 +998,11 @@ describe("SkeletonTracingSaga", () => { expect(simplifiedFirstBatch[3].name).toBe("deleteEdge"); expect(simplifiedFirstBatch[4].name).toBe("deleteEdge"); expect(simplifiedFirstBatch.length).toBe(5); + expect(simplifiedUpdateActions1.length).toBe(1); // the creation of a new tree (b) - const simplifiedSecondBatch = simplifiedUpdateActions[1].actions; + const simplifiedSecondBatch = simplifiedUpdateActions2[0].actions; + expect(simplifiedUpdateActions2.length).toBe(1); expect(simplifiedSecondBatch[0]).toMatchObject({ name: "createTree", value: { @@ -1016,6 +1032,7 @@ describe("SkeletonTracingSaga", () => { expect(simplifiedSecondBatch[3].name).toBe("deleteEdge"); expect(simplifiedSecondBatch[4].name).toBe("deleteEdge"); expect(simplifiedSecondBatch.length).toBe(5); + expect(simplifiedUpdateActions2.length).toBe(1); }); it("compactUpdateActions should do nothing if it cannot 
compact", () => { @@ -1042,7 +1059,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); // The deleteTree optimization in compactUpdateActions (that is unrelated to this test) // will remove the first deleteNode update action as the first tree is deleted because of the merge, @@ -1068,7 +1086,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; @@ -1097,7 +1116,8 @@ describe("SkeletonTracingSaga", () => { const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing, + testState.annotation.skeleton!, + newState.annotation.skeleton!, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_remote_bucket_updates.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_remote_bucket_updates.spec.ts new file mode 100644 index 00000000000..88eeb6d2968 --- /dev/null +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_remote_bucket_updates.spec.ts @@ -0,0 +1,84 @@ +import { + createBucketResponseFunction, + setupWebknossosForTesting, + type WebknossosTestContext, +} from "test/helpers/apiHelpers"; +import { call } from "typed-redux-saga"; +import type { Vector3 } from "viewer/constants"; +import { hasRootSagaCrashed } from "viewer/model/sagas/root_saga"; +import { tryToIncorporateActions } from "viewer/model/sagas/saving/save_saga"; +import { startSaga } from "viewer/store"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +describe("Volume Tracing with 
remote updates", () => { + beforeEach(async (context) => { + await setupWebknossosForTesting(context, "volume"); + }); + + afterEach(async (context) => { + expect(hasRootSagaCrashed()).toBe(false); + // Saving after each test and checking that the root saga didn't crash, + // ensures that each test is cleanly exited. Without it weird output can + // occur (e.g., a promise gets resolved which interferes with the next test). + await context.api.tracing.save(); + expect(hasRootSagaCrashed()).toBe(false); + context.tearDownPullQueues(); + }); + + it("A bucket should automatically be reloaded if newer data exists on the server", async ({ + api, + mocks, + }) => { + const oldCellId = 11; + + vi.mocked(mocks.Request).sendJSONReceiveArraybufferWithHeaders.mockImplementation( + createBucketResponseFunction(Uint16Array, oldCellId, 5), + ); + + // Reload buckets which might have already been loaded before swapping the sendJSONReceiveArraybufferWithHeaders + // function. + await api.data.reloadAllBuckets(); + + const task = startSaga(function* () { + const position = [0, 0, 0] as Vector3; + const newCellId = 2; + const volumeTracingLayerName = api.data.getVolumeTracingLayerIds()[0]; + + expect( + yield call(() => api.data.getDataValue(volumeTracingLayerName, position)), + "Initially, there should be oldCellId", + ).toBe(oldCellId); + + // Already prepare the updated backend response. + vi.mocked(mocks.Request).sendJSONReceiveArraybufferWithHeaders.mockImplementation( + createBucketResponseFunction(Uint16Array, newCellId, 5), + ); + + yield tryToIncorporateActions([ + { + version: 1, + value: [ + { + name: "updateBucket", + value: { + actionTracingId: "volumeTracingId", + actionTimestamp: 0, + position, + additionalCoordinates: undefined, + mag: [1, 1, 1], + cubeSize: 1024, + base64Data: undefined, // The server will not send this, either. 
+ }, + }, + ], + }, + ]); + + expect(yield call(() => api.data.getDataValue(volumeTracingLayerName, position))).toBe( + newCellId, + ); + }); + + await task.toPromise(); + }); +}); diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index 199eba82ad3..a85a713940e 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -1,7 +1,6 @@ import { it, expect, describe, beforeEach, afterEach } from "vitest"; import { setupWebknossosForTesting, type WebknossosTestContext } from "test/helpers/apiHelpers"; import { take, put, call } from "redux-saga/effects"; -import type { ServerVolumeTracing } from "types/api_types"; import { AnnotationTool } from "viewer/model/accessors/tool_accessor"; import { OrthoViews, @@ -22,41 +21,7 @@ import VolumeLayer from "viewer/model/volumetracing/volumelayer"; import { serverVolumeToClientVolumeTracing } from "viewer/model/reducers/volumetracing_reducer"; import { Model, Store } from "viewer/singletons"; import { hasRootSagaCrashed } from "viewer/model/sagas/root_saga"; - -const serverVolumeTracing: ServerVolumeTracing = { - typ: "Volume", - id: "tracingId", - elementClass: "uint32", - createdTimestamp: 0, - boundingBox: { - topLeft: { - x: 0, - y: 0, - z: 0, - }, - width: 10, - height: 10, - depth: 10, - }, - segments: [], - segmentGroups: [], - additionalAxes: [], - userBoundingBoxes: [], - largestSegmentId: 0, - userStates: [], - zoomLevel: 0, - editPosition: { - x: 0, - y: 0, - z: 0, - }, - editPositionAdditionalCoordinates: null, - editRotation: { - x: 0, - y: 0, - z: 0, - }, -}; +import { tracing as serverVolumeTracing } from "test/fixtures/volumetracing_server_objects"; const volumeTracing = serverVolumeToClientVolumeTracing(serverVolumeTracing, null, null); @@ -85,10 +50,7 @@ describe("VolumeTracingSaga", () => { 
afterEach(async (context) => { context.tearDownPullQueues(); - // Saving after each test and checking that the root saga didn't crash, - // ensures that each test is cleanly exited. Without it weird output can - // occur (e.g., a promise gets resolved which interferes with the next test). expect(hasRootSagaCrashed()).toBe(false); }); @@ -110,7 +72,7 @@ describe("VolumeTracingSaga", () => { expect(action).toMatchObject({ name: "updateActiveSegmentId", value: { - actionTracingId: "volumeTracingId-1234", + actionTracingId: volumeTracing.tracingId, activeSegmentId: 5, }, }); diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga_integration_1.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga_integration_1.spec.ts index fd698777bb5..cdc05727bab 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga_integration_1.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga_integration_1.spec.ts @@ -89,7 +89,7 @@ describe("Volume Tracing", () => { ); const cube = api.data.model.getCubeByLayerName(volumeTracingLayerName); - cube.collectAllBuckets(); + cube.removeAllBuckets(); await dispatchUndoAsync(Store.dispatch); diff --git a/frontend/javascripts/test/schemas/dataset_view_configuration.spec.ts b/frontend/javascripts/test/schemas/dataset_view_configuration.spec.ts index b9a163cfb74..894f79a5b69 100644 --- a/frontend/javascripts/test/schemas/dataset_view_configuration.spec.ts +++ b/frontend/javascripts/test/schemas/dataset_view_configuration.spec.ts @@ -1,4 +1,3 @@ -// @ts-nocheck import _ from "lodash"; import { describe, it, expect } from "vitest"; import { validateObjectWithType } from "types/validation"; @@ -7,7 +6,7 @@ import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_ import DATASET from "test/fixtures/dataset_server_object"; const datasetViewConfigurationType = "types::DatasetViewConfiguration"; -const CORRECT_DATASET_CONFIGURATION = { +const 
CORRECT_DATASET_CONFIGURATION: Record = { fourBit: false, interpolation: true, renderMissingDataBlack: true, diff --git a/frontend/javascripts/types/api_types.ts b/frontend/javascripts/types/api_types.ts index 33416016857..d7e7b1be3b8 100644 --- a/frontend/javascripts/types/api_types.ts +++ b/frontend/javascripts/types/api_types.ts @@ -1,6 +1,8 @@ import type { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; import _ from "lodash"; +import type { BoundingBoxProto } from "types/bounding_box"; import type { + AdditionalCoordinate, ColorObject, LOG_LEVELS, NestedMatrix4, @@ -15,7 +17,7 @@ import type { TracingStats, VolumeTracingStats, } from "viewer/model/accessors/annotation_accessor"; -import type { ServerUpdateAction } from "viewer/model/sagas/update_actions"; +import type { ServerUpdateAction } from "viewer/model/sagas/volume/update_actions"; import type { CommentType, Edge, TreeGroup } from "viewer/model/types/tree_types"; import type { BoundingBoxObject, @@ -25,7 +27,9 @@ import type { } from "viewer/store"; import type { EmptyObject } from "./globals"; -export type AdditionalCoordinate = { name: string; value: number }; +// Re-export +export type { BoundingBoxProto } from "types/bounding_box"; +export type { AdditionalCoordinate } from "viewer/constants"; export type APIMessage = { [key in "info" | "warning" | "error"]?: string }; export type ElementClass = @@ -67,7 +71,7 @@ export type AdditionalAxis = { name: string; }; -export type ServerAdditionalAxis = { +export type AdditionalAxisProto = { bounds: { x: number; y: number }; index: number; name: string; @@ -854,20 +858,14 @@ export type ServerBranchPoint = { createdTimestamp: number; nodeId: number; }; -export type ServerBoundingBox = { - topLeft: Point3; - width: number; - height: number; - depth: number; -}; -export type UserBoundingBoxFromServer = { - boundingBox: ServerBoundingBox; +export type UserBoundingBoxProto = { + boundingBox: BoundingBoxProto; id: number; name?: string; color?: 
ColorObject; isVisible?: boolean; }; -export type ServerBoundingBoxTypeTuple = { +export type ServerBoundingBoxMinMaxTypeTuple = { topLeft: Vector3; width: number; height: number; @@ -915,11 +913,11 @@ type ServerSegment = { }; export type ServerTracingBase = { id: string; - userBoundingBoxes: Array; - userBoundingBox?: ServerBoundingBox; + userBoundingBoxes: Array; + userBoundingBox?: BoundingBoxProto; createdTimestamp: number; error?: string; - additionalAxes: ServerAdditionalAxis[]; + additionalAxes: AdditionalAxisProto[]; // The backend sends the version property, but the front-end should // not care about it. To ensure this, parseProtoTracing will remove // the property. @@ -950,7 +948,7 @@ export type ServerSkeletonTracing = ServerTracingBase & { // This is done to simplify the selection for the type. typ: "Skeleton"; activeNodeId?: number; // only use as a fallback if userStates is empty - boundingBox?: ServerBoundingBox; + boundingBox?: BoundingBoxProto; trees: Array; treeGroups: Array | null | undefined; storedWithExternalTreeBodies?: boolean; // unused in frontend @@ -973,7 +971,7 @@ export type ServerVolumeTracing = ServerTracingBase & { // This is done to simplify the selection for the type. 
typ: "Volume"; activeSegmentId?: number; // only use as a fallback if userStates is empty - boundingBox: ServerBoundingBox; + boundingBox: BoundingBoxProto; elementClass: ElementClass; fallbackLayer?: string; segments: Array; diff --git a/frontend/javascripts/types/bounding_box.ts b/frontend/javascripts/types/bounding_box.ts new file mode 100644 index 00000000000..e608a69642a --- /dev/null +++ b/frontend/javascripts/types/bounding_box.ts @@ -0,0 +1,20 @@ +import type { Point3, Vector3 } from "viewer/constants"; + +export type BoundingBoxMinMaxType = { + min: Vector3; + max: Vector3; +}; + +export type BoundingBoxObject = { + readonly topLeft: Vector3; + readonly width: number; + readonly height: number; + readonly depth: number; +}; + +export type BoundingBoxProto = { + topLeft: Point3; + width: number; + height: number; + depth: number; +}; diff --git a/frontend/javascripts/viewer/api/api_latest.ts b/frontend/javascripts/viewer/api/api_latest.ts index 509db1f1507..786d95e5393 100644 --- a/frontend/javascripts/viewer/api/api_latest.ts +++ b/frontend/javascripts/viewer/api/api_latest.ts @@ -20,9 +20,9 @@ import messages from "messages"; import TWEEN from "tween.js"; import { type APICompoundType, APICompoundTypeEnum, type ElementClass } from "types/api_types"; import type { AdditionalCoordinate } from "types/api_types"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; import type { Writeable } from "types/globals"; import type { - BoundingBoxType, BucketAddress, ControlMode, LabeledVoxelsMap, @@ -1638,7 +1638,7 @@ class DataApi { await Model.ensureSavedState(); } - dataLayer.cube.collectBucketsIf(predicateFn || truePredicate); + dataLayer.cube.removeBucketsIf(predicateFn || truePredicate); dataLayer.layerRenderingManager.refresh(); } }), @@ -1654,7 +1654,7 @@ class DataApi { } Utils.values(this.model.dataLayers).forEach((dataLayer: DataLayer) => { - dataLayer.cube.collectAllBuckets(); + dataLayer.cube.removeAllBuckets(); 
dataLayer.layerRenderingManager.refresh(); }); } @@ -1887,7 +1887,7 @@ */ async getDataFor2DBoundingBox( layerName: string, - bbox: BoundingBoxType, + bbox: BoundingBoxMinMaxType, _zoomStep: number | null | undefined = null, ) { return this.getDataForBoundingBox(layerName, bbox, _zoomStep); } @@ -1900,7 +1900,7 @@ */ async getDataForBoundingBox( layerName: string, - mag1Bbox: BoundingBoxType, + mag1Bbox: BoundingBoxMinMaxType, _zoomStep: number | null | undefined = null, additionalCoordinates: AdditionalCoordinate[] | null = null, ) { @@ -1989,7 +1989,7 @@ } getBucketAddressesInCuboid( - bbox: BoundingBoxType, + bbox: BoundingBoxMinMaxType, magnifications: Array, zoomStep: number, additionalCoordinates: AdditionalCoordinate[] | null, @@ -2036,7 +2036,7 @@ cutOutCuboid( buckets: Array, - bbox: BoundingBoxType, + bbox: BoundingBoxMinMaxType, elementClass: ElementClass, magnifications: Array, zoomStep: number, diff --git a/frontend/javascripts/viewer/api/wk_dev.ts b/frontend/javascripts/viewer/api/wk_dev.ts index 9a28e7f959b..5b5330225ec 100644 --- a/frontend/javascripts/viewer/api/wk_dev.ts +++ b/frontend/javascripts/viewer/api/wk_dev.ts @@ -11,7 +11,7 @@ import type ApiLoader from "./api_loader"; // Can be accessed via window.webknossos.DEV.flags. Only use this // for debugging or one off scripts. export const WkDevFlags = { - logActions: false, + logActions: false, sam: { useLocalMask: true, }, @@ -27,6 +29,9 @@ // it needs to be set to true before the rendering is initialized.
disableLayerNameSanitization: false, }, + debugging: { + showCurrentVersionInInfoTab: true, + }, meshing: { marchingCubeSizeInTargetMag: [64, 64, 64] as Vector3, }, diff --git a/frontend/javascripts/viewer/constants.ts b/frontend/javascripts/viewer/constants.ts index 1744a7a9745..733aa966509 100644 --- a/frontend/javascripts/viewer/constants.ts +++ b/frontend/javascripts/viewer/constants.ts @@ -1,4 +1,4 @@ -import type { AdditionalCoordinate } from "types/api_types"; +export type AdditionalCoordinate = { name: string; value: number }; export const ViewModeValues = ["orthogonal", "flight", "oblique"] as ViewMode[]; @@ -38,10 +38,6 @@ export type ColorObject = { b: number; a: number; }; -export type BoundingBoxType = { - min: Vector3; - max: Vector3; -}; export type Rect = { top: number; left: number; diff --git a/frontend/javascripts/viewer/controller/combinations/bounding_box_handlers.ts b/frontend/javascripts/viewer/controller/combinations/bounding_box_handlers.ts index b84d77284a6..3c19a064892 100644 --- a/frontend/javascripts/viewer/controller/combinations/bounding_box_handlers.ts +++ b/frontend/javascripts/viewer/controller/combinations/bounding_box_handlers.ts @@ -1,7 +1,8 @@ import { V3 } from "libs/mjs"; import { document } from "libs/window"; import _ from "lodash"; -import type { BoundingBoxType, OrthoView, Point2, Vector2, Vector3 } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { OrthoView, Point2, Vector2, Vector3 } from "viewer/constants"; import getSceneController from "viewer/controller/scene_controller_provider"; import { getSomeTracing } from "viewer/model/accessors/tracing_accessor"; import { @@ -115,7 +116,7 @@ export type SelectedEdge = { type DistanceArray = [number, number, number, number]; function computeDistanceArray( - boundingBoxBounds: BoundingBoxType, + boundingBoxBounds: BoundingBoxMinMaxType, globalPosition: Vector3, indices: DimensionMap, planeRatio: Vector3, diff --git 
a/frontend/javascripts/viewer/controller/combinations/tool_controls.ts b/frontend/javascripts/viewer/controller/combinations/tool_controls.ts index 23915beaf21..7fc6832bb44 100644 --- a/frontend/javascripts/viewer/controller/combinations/tool_controls.ts +++ b/frontend/javascripts/viewer/controller/combinations/tool_controls.ts @@ -43,7 +43,7 @@ import { finishedResizingUserBoundingBoxAction } from "viewer/model/actions/anno import { minCutAgglomerateWithPositionAction, proofreadAtPosition, - proofreadMerge, + proofreadMergeAction, } from "viewer/model/actions/proofread_actions"; import { hideMeasurementTooltipAction, @@ -1082,7 +1082,7 @@ export class ProofreadToolController { const globalPosition = calculateGlobalPos(state, pos); if (event.shiftKey) { - Store.dispatch(proofreadMerge(globalPosition)); + Store.dispatch(proofreadMergeAction(globalPosition)); } else if (event.ctrlKey || event.metaKey) { Store.dispatch(minCutAgglomerateWithPositionAction(globalPosition)); } else { diff --git a/frontend/javascripts/viewer/controller/scene_controller.ts b/frontend/javascripts/viewer/controller/scene_controller.ts index 2bbad2759be..08e1f390439 100644 --- a/frontend/javascripts/viewer/controller/scene_controller.ts +++ b/frontend/javascripts/viewer/controller/scene_controller.ts @@ -7,13 +7,8 @@ import _ from "lodash"; import * as THREE from "three"; import { acceleratedRaycast, computeBoundsTree, disposeBoundsTree } from "three-mesh-bvh"; -import type { - BoundingBoxType, - OrthoView, - OrthoViewMap, - OrthoViewWithoutTDMap, - Vector3, -} from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { OrthoView, OrthoViewMap, OrthoViewWithoutTDMap, Vector3 } from "viewer/constants"; import constants, { OrthoViews, OrthoViewValuesWithoutTDView, @@ -325,7 +320,7 @@ class SceneController { } updateTaskBoundingBoxes( - taskCubeByTracingId: Record, + taskCubeByTracingId: Record, ): void { /* Ensures that a green task bounding box is 
rendered in the scene for diff --git a/frontend/javascripts/viewer/controller/viewmodes/arbitrary_controller.tsx b/frontend/javascripts/viewer/controller/viewmodes/arbitrary_controller.tsx index c712664de8d..f138a25d71f 100644 --- a/frontend/javascripts/viewer/controller/viewmodes/arbitrary_controller.tsx +++ b/frontend/javascripts/viewer/controller/viewmodes/arbitrary_controller.tsx @@ -35,12 +35,12 @@ import { createBranchPointAction, createNodeAction, createTreeAction, - deleteNodeAsUserAction, requestDeleteBranchPointAction, setActiveNodeAction, toggleAllTreesAction, toggleInactiveTreesAction, } from "viewer/model/actions/skeletontracing_actions"; +import { deleteNodeAsUserAction } from "viewer/model/actions/skeletontracing_actions_with_effects"; import { listenToStoreProperty } from "viewer/model/helpers/listener_helpers"; import { api } from "viewer/singletons"; import Store from "viewer/store"; diff --git a/frontend/javascripts/viewer/controller/viewmodes/plane_controller.tsx b/frontend/javascripts/viewer/controller/viewmodes/plane_controller.tsx index 2fb72a5b7fb..0ae334327b7 100644 --- a/frontend/javascripts/viewer/controller/viewmodes/plane_controller.tsx +++ b/frontend/javascripts/viewer/controller/viewmodes/plane_controller.tsx @@ -44,11 +44,11 @@ import { updateUserSettingAction } from "viewer/model/actions/settings_actions"; import { createBranchPointAction, createTreeAction, - deleteNodeAsUserAction, requestDeleteBranchPointAction, toggleAllTreesAction, toggleInactiveTreesAction, } from "viewer/model/actions/skeletontracing_actions"; +import { deleteNodeAsUserAction } from "viewer/model/actions/skeletontracing_actions_with_effects"; import { cycleToolAction, enterAction, diff --git a/frontend/javascripts/viewer/default_state.ts b/frontend/javascripts/viewer/default_state.ts index ba7a5fc5f3d..58f5162f6f7 100644 --- a/frontend/javascripts/viewer/default_state.ts +++ b/frontend/javascripts/viewer/default_state.ts @@ -1,5 +1,5 @@ import { 
getSystemColorTheme } from "theme"; -import type { APIAllowedMode, APIAnnotationType, APIAnnotationVisibility } from "types/api_types"; +import type { APIAnnotationType, APIAnnotationVisibility } from "types/api_types"; import { defaultDatasetViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; import Constants, { ControlModeEnum, @@ -9,6 +9,7 @@ import Constants, { TDViewDisplayModeEnum, InterpolationModeEnum, UnitLong, + ViewModeValues, } from "viewer/constants"; import constants from "viewer/constants"; import { AnnotationTool, Toolkit } from "viewer/model/accessors/tool_accessor"; @@ -33,7 +34,7 @@ const initialAnnotationInfo = { somaClickingAllowed: false, mergerMode: false, volumeInterpolationAllowed: false, - allowedModes: ["orthogonal", "oblique", "flight"] as APIAllowedMode[], + allowedModes: ViewModeValues, magRestrictions: {}, }, visibility: "Internal" as APIAnnotationVisibility, @@ -122,6 +123,7 @@ const defaultState: WebknossosState = { }, task: null, dataset: { + areLayersPreprocessed: true, id: "dummy-dataset-id", name: "Loading", folderId: "dummy-folder-id", @@ -176,6 +178,7 @@ const defaultState: WebknossosState = { contributors: [], othersMayEdit: false, blockedByUser: null, + isMutexAcquired: false, annotationLayers: [], version: 0, earliestAccessibleVersion: 0, diff --git a/frontend/javascripts/viewer/geometries/skeleton.ts b/frontend/javascripts/viewer/geometries/skeleton.ts index eb76513426a..d98497be1c1 100644 --- a/frontend/javascripts/viewer/geometries/skeleton.ts +++ b/frontend/javascripts/viewer/geometries/skeleton.ts @@ -11,7 +11,7 @@ import NodeShader, { import { getZoomValue } from "viewer/model/accessors/flycam_accessor"; import { sum } from "viewer/model/helpers/iterator_utils"; import { cachedDiffTrees } from "viewer/model/sagas/skeletontracing_saga"; -import type { CreateActionNode, UpdateActionNode } from "viewer/model/sagas/update_actions"; +import type { CreateActionNode, UpdateActionNode } from 
"viewer/model/sagas/volume/update_actions"; import type { Edge, Node, Tree } from "viewer/model/types/tree_types"; import type { SkeletonTracing, WebknossosState } from "viewer/store"; import Store from "viewer/throttled_store"; diff --git a/frontend/javascripts/viewer/merger_mode.ts b/frontend/javascripts/viewer/merger_mode.ts index 4bcfbe5b1da..090476b64a6 100644 --- a/frontend/javascripts/viewer/merger_mode.ts +++ b/frontend/javascripts/viewer/merger_mode.ts @@ -17,7 +17,7 @@ import type { DeleteNodeUpdateAction, NodeWithTreeId, UpdateActionNode, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import { api } from "viewer/singletons"; import type { SkeletonTracing, StoreType, WebknossosState } from "viewer/store"; import Store from "viewer/throttled_store"; diff --git a/frontend/javascripts/viewer/model.ts b/frontend/javascripts/viewer/model.ts index 9e7946fd959..ad624437481 100644 --- a/frontend/javascripts/viewer/model.ts +++ b/frontend/javascripts/viewer/model.ts @@ -26,7 +26,9 @@ import Deferred from "libs/async/deferred"; import { globalToLayerTransformedPosition } from "./model/accessors/dataset_layer_transformation_accessor"; import { initialize } from "./model_initialization"; -// TODO: Non-reactive +const WAIT_AFTER_SAVE_TRIGGER = process.env.IS_TESTING ? 50 : 500; + +// TODO: This class should be moved into the store and sagas. export class WebKnossosModel { // @ts-expect-error ts-migrate(2564) FIXME: Property 'dataLayers' has no initializer and is no... 
Remove this comment to see the full error message dataLayers: Record; @@ -385,7 +387,7 @@ export class WebKnossosModel { Store.dispatch(saveNowAction()); } - await Utils.sleep(500); + await Utils.sleep(WAIT_AFTER_SAVE_TRIGGER); } }; diff --git a/frontend/javascripts/viewer/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/viewer/model/accessors/skeletontracing_accessor.ts index 02937b11776..d4f24bc5406 100644 --- a/frontend/javascripts/viewer/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/viewer/model/accessors/skeletontracing_accessor.ts @@ -73,7 +73,7 @@ export function getActiveNode(skeletonTracing: SkeletonTracing): Node | null { const { activeTreeId, activeNodeId } = skeletonTracing; if (activeTreeId != null && activeNodeId != null) { - return skeletonTracing.trees.getOrThrow(activeTreeId).nodes.getOrThrow(activeNodeId); + return skeletonTracing.trees.getNullable(activeTreeId)?.nodes.getNullable(activeNodeId) ?? null; } return null; @@ -86,7 +86,7 @@ export function getActiveTree(skeletonTracing: SkeletonTracing | null | undefine const { activeTreeId } = skeletonTracing; if (activeTreeId != null) { - return skeletonTracing.trees.getNullable(activeTreeId) || null; + return skeletonTracing.trees.getNullable(activeTreeId) ?? null; } return null; diff --git a/frontend/javascripts/viewer/model/accessors/volumetracing_accessor.ts b/frontend/javascripts/viewer/model/accessors/volumetracing_accessor.ts index d52dbf73443..c43f33dab69 100644 --- a/frontend/javascripts/viewer/model/accessors/volumetracing_accessor.ts +++ b/frontend/javascripts/viewer/model/accessors/volumetracing_accessor.ts @@ -73,7 +73,9 @@ export function getVolumeTracingById( const volumeTracing = annotation.volumes.find((t) => t.tracingId === tracingId); if (volumeTracing == null) { - throw new Error(`Could not find volume tracing with id ${tracingId}`); + throw new Error( + `Could not find volume tracing with id ${tracingId}. 
Only found: ${annotation.volumes.map((t) => t.tracingId)}`, + ); } return volumeTracing; diff --git a/frontend/javascripts/viewer/model/actions/annotation_actions.ts b/frontend/javascripts/viewer/model/actions/annotation_actions.ts index 32dbf01b6b8..97fc207c1e0 100644 --- a/frontend/javascripts/viewer/model/actions/annotation_actions.ts +++ b/frontend/javascripts/viewer/model/actions/annotation_actions.ts @@ -52,13 +52,14 @@ export type EditAnnotationLayerAction = ReturnType; type SetAnnotationAllowUpdateAction = ReturnType; type SetBlockedByUserAction = ReturnType; +export type SetIsMutexAcquiredAction = ReturnType; type SetUserBoundingBoxesAction = ReturnType; type FinishedResizingUserBoundingBoxAction = ReturnType< typeof finishedResizingUserBoundingBoxAction >; type AddUserBoundingBoxesAction = ReturnType; -type AddNewUserBoundingBox = ReturnType; -type ChangeUserBoundingBoxAction = ReturnType; +export type AddNewUserBoundingBox = ReturnType; +export type ChangeUserBoundingBoxAction = ReturnType; type DeleteUserBoundingBox = ReturnType; export type UpdateMeshVisibilityAction = ReturnType; export type UpdateMeshOpacityAction = ReturnType; @@ -87,6 +88,7 @@ export type AnnotationActionTypes = | SetAnnotationDescriptionAction | SetAnnotationAllowUpdateAction | SetBlockedByUserAction + | SetIsMutexAcquiredAction | SetUserBoundingBoxesAction | ChangeUserBoundingBoxAction | FinishedResizingUserBoundingBoxAction @@ -176,6 +178,12 @@ export const setBlockedByUserAction = (blockedByUser: APIUserCompact | null | un blockedByUser, }) as const; +export const setIsMutexAcquiredAction = (isMutexAcquired: boolean) => + ({ + type: "SET_IS_MUTEX_ACQUIRED", + isMutexAcquired, + }) as const; + // Strictly speaking this is no annotation action but a tracing action, as the boundingBox is saved with // the tracing, hence no ANNOTATION in the action type. 
export const setUserBoundingBoxesAction = (userBoundingBoxes: Array) => diff --git a/frontend/javascripts/viewer/model/actions/dataset_actions.ts b/frontend/javascripts/viewer/model/actions/dataset_actions.ts index 7c123afc6f4..629d904e3a6 100644 --- a/frontend/javascripts/viewer/model/actions/dataset_actions.ts +++ b/frontend/javascripts/viewer/model/actions/dataset_actions.ts @@ -1,4 +1,5 @@ -import type { APIDataset, CoordinateTransformation } from "types/api_types"; +import type { CoordinateTransformation } from "types/api_types"; +import type { StoreDataset } from "viewer/store"; type SetDatasetAction = ReturnType; type SetLayerMappingsAction = ReturnType; type SetLayerTransformsAction = ReturnType; @@ -16,7 +17,7 @@ export type DatasetAction = | SetLayerHasSegmentIndexAction | EnsureSegmentIndexIsLoadedAction; -export const setDatasetAction = (dataset: APIDataset) => +export const setDatasetAction = (dataset: StoreDataset) => ({ type: "SET_DATASET", dataset, diff --git a/frontend/javascripts/viewer/model/actions/proofread_actions.ts b/frontend/javascripts/viewer/model/actions/proofread_actions.ts index 31141279d10..411cfb1c2d2 100644 --- a/frontend/javascripts/viewer/model/actions/proofread_actions.ts +++ b/frontend/javascripts/viewer/model/actions/proofread_actions.ts @@ -4,7 +4,7 @@ import type { Tree } from "viewer/model/types/tree_types"; export type ProofreadAtPositionAction = ReturnType; export type ClearProofreadingByProductsAction = ReturnType; -export type ProofreadMergeAction = ReturnType; +export type ProofreadMergeAction = ReturnType; export type MinCutAgglomerateAction = ReturnType; export type MinCutAgglomerateWithPositionAction = ReturnType< typeof minCutAgglomerateWithPositionAction @@ -36,7 +36,7 @@ export const clearProofreadingByProducts = () => type: "CLEAR_PROOFREADING_BY_PRODUCTS", }) as const; -export const proofreadMerge = ( +export const proofreadMergeAction = ( position: Vector3 | null, segmentId?: number | null, agglomerateId?: 
number | null, diff --git a/frontend/javascripts/viewer/model/actions/save_actions.ts b/frontend/javascripts/viewer/model/actions/save_actions.ts index 2335b626c59..d1dccb0d393 100644 --- a/frontend/javascripts/viewer/model/actions/save_actions.ts +++ b/frontend/javascripts/viewer/model/actions/save_actions.ts @@ -6,7 +6,7 @@ import type { UpdateAction, UpdateActionWithIsolationRequirement, UpdateActionWithoutIsolationRequirement, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; export type SaveQueueType = "skeleton" | "volume" | "mapping"; export type PushSaveQueueTransaction = { @@ -26,6 +26,8 @@ type DisableSavingAction = ReturnType; export type EnsureTracingsWereDiffedToSaveQueueAction = ReturnType< typeof ensureTracingsWereDiffedToSaveQueueAction >; +export type EnsureMaySaveNowAction = ReturnType; +export type DoneSavingAction = ReturnType; export type SaveAction = | PushSaveQueueTransaction @@ -38,7 +40,9 @@ export type SaveAction = | UndoAction | RedoAction | DisableSavingAction - | EnsureTracingsWereDiffedToSaveQueueAction; + | EnsureTracingsWereDiffedToSaveQueueAction + | EnsureMaySaveNowAction + | DoneSavingAction; // The action creators pushSaveQueueTransaction and pushSaveQueueTransactionIsolated // are typed so that update actions that need isolation are isolated in a group each. 
@@ -132,3 +136,21 @@ export const ensureTracingsWereDiffedToSaveQueueAction = (callback: (tracingId: type: "ENSURE_TRACINGS_WERE_DIFFED_TO_SAVE_QUEUE", callback, }) as const; + +export const ensureMaySaveNowAction = (callback: () => void) => + ({ + type: "ENSURE_MAY_SAVE_NOW", + callback, + }) as const; + +export const dispatchEnsureMaySaveNowAsync = async (dispatch: Dispatch): Promise => { + const readyDeferred = new Deferred(); + const action = ensureMaySaveNowAction(() => readyDeferred.resolve(null)); + dispatch(action); + await readyDeferred.promise(); +}; + +export const doneSavingAction = () => + ({ + type: "DONE_SAVING", + }) as const; diff --git a/frontend/javascripts/viewer/model/actions/skeletontracing_actions.tsx b/frontend/javascripts/viewer/model/actions/skeletontracing_actions.tsx index 2ac352cf29c..dac947792ff 100644 --- a/frontend/javascripts/viewer/model/actions/skeletontracing_actions.tsx +++ b/frontend/javascripts/viewer/model/actions/skeletontracing_actions.tsx @@ -1,21 +1,15 @@ -import { Modal } from "antd"; -import renderIndependently from "libs/render_independently"; -import messages from "messages"; import type { Key } from "react"; import { batchActions } from "redux-batched-actions"; import type { MetadataEntryProto, ServerSkeletonTracing } from "types/api_types"; import type { AdditionalCoordinate } from "types/api_types"; import type { TreeType, Vector3 } from "viewer/constants"; import { - enforceSkeletonTracing, - getTree, - getTreeAndNode, -} from "viewer/model/accessors/skeletontracing_accessor"; -import { AllUserBoundingBoxActions } from "viewer/model/actions/annotation_actions"; + type AddNewUserBoundingBox, + AllUserBoundingBoxActions, +} from "viewer/model/actions/annotation_actions"; import type { MutableTreeMap, Tree, TreeGroup } from "viewer/model/types/tree_types"; -import type { SkeletonTracing, WebknossosState } from "viewer/store"; -import Store from "viewer/store"; -import RemoveTreeModal from 
"viewer/view/remove_tree_modal"; +import type { SkeletonTracing } from "viewer/store"; +import type { ApplicableSkeletonUpdateAction } from "../sagas/volume/update_actions"; export type InitializeSkeletonTracingAction = ReturnType; export type CreateNodeAction = ReturnType; @@ -40,7 +34,7 @@ type RequestDeleteBranchPointAction = ReturnType; type SetEdgeVisibilityAction = ReturnType; type AddTreesAndGroupsAction = ReturnType; -type DeleteTreeAction = ReturnType; +export type DeleteTreeAction = ReturnType; type DeleteTreesAction = ReturnType; type ResetSkeletonTracingAction = ReturnType; type SetActiveTreeAction = ReturnType; @@ -65,8 +59,11 @@ type SetTreeGroupAction = ReturnType; type SetShowSkeletonsAction = ReturnType; type SetMergerModeEnabledAction = ReturnType; type UpdateNavigationListAction = ReturnType; +type ApplySkeletonUpdateActionsFromServerAction = ReturnType< + typeof applySkeletonUpdateActionsFromServerAction +>; export type LoadAgglomerateSkeletonAction = ReturnType; -type NoAction = ReturnType; +export type NoAction = ReturnType; export type BatchableUpdateTreeAction = | SetTreeGroupAction @@ -131,7 +128,9 @@ export type SkeletonTracingAction = | SetShowSkeletonsAction | SetMergerModeEnabledAction | UpdateNavigationListAction - | LoadAgglomerateSkeletonAction; + | LoadAgglomerateSkeletonAction + | ApplySkeletonUpdateActionsFromServerAction + | AddNewUserBoundingBox; export const SkeletonTracingSaveRelevantActions = [ "INITIALIZE_SKELETONTRACING", @@ -175,7 +174,7 @@ export const SkeletonTracingSaveRelevantActions = [ ...AllUserBoundingBoxActions, ]; -const noAction = () => +export const noAction = () => ({ type: "NONE", }) as const; @@ -239,7 +238,7 @@ export const deleteEdgeAction = ( }) as const; export const setActiveNodeAction = ( - nodeId: number, + nodeId: number | null, suppressAnimation: boolean = false, suppressCentering: boolean = false, ) => @@ -564,74 +563,6 @@ export const setMergerModeEnabledAction = (active: boolean) => active, }) as 
const; -// The following actions have the prefix "AsUser" which means that they -// offer some additional logic which is sensible from a user-centered point of view. -// For example, the deleteNodeAsUserAction also initiates the deletion of a tree, -// when the current tree is empty. -export const deleteNodeAsUserAction = ( - state: WebknossosState, - nodeId?: number, - treeId?: number, -): DeleteNodeAction | NoAction | DeleteTreeAction => { - const skeletonTracing = enforceSkeletonTracing(state.annotation); - const treeAndNode = getTreeAndNode(skeletonTracing, nodeId, treeId); - - if (!treeAndNode) { - const tree = getTree(skeletonTracing, treeId); - if (!tree) return noAction(); - - // If the tree is empty, it will be deleted - return tree.nodes.size() === 0 ? deleteTreeAction(tree.treeId) : noAction(); - } - - const [tree, node] = treeAndNode; - - if (state.task != null && node.id === 1) { - // Let the user confirm the deletion of the initial node (node with id 1) of a task - Modal.confirm({ - title: messages["tracing.delete_initial_node"], - onOk: () => { - Store.dispatch(deleteNodeAction(node.id, tree.treeId)); - }, - }); - // As Modal.confirm is async, return noAction() and the modal will dispatch the real action - // if the user confirms - return noAction(); - } - - return deleteNodeAction(node.id, tree.treeId); -}; - -// Let the user confirm the deletion of the initial node (node with id 1) of a task -function confirmDeletingInitialNode(treeId: number) { - Modal.confirm({ - title: messages["tracing.delete_tree_with_initial_node"], - onOk: () => { - Store.dispatch(deleteTreeAction(treeId)); - }, - }); -} - -export const handleDeleteTreeByUser = (treeId?: number) => { - const state = Store.getState(); - const skeletonTracing = enforceSkeletonTracing(state.annotation); - const tree = getTree(skeletonTracing, treeId); - if (!tree) return; - - if (state.task != null && tree.nodes.has(1)) { - confirmDeletingInitialNode(tree.treeId); - } else if 
(state.userConfiguration.hideTreeRemovalWarning) { - Store.dispatch(deleteTreeAction(tree.treeId)); - } else { - renderIndependently((destroy) => ( - Store.dispatch(deleteTreeAction(tree.treeId))} - destroy={destroy} - /> - )); - } -}; - export const updateNavigationListAction = (list: Array, activeIndex: number) => ({ type: "UPDATE_NAVIGATION_LIST", @@ -639,6 +570,14 @@ export const updateNavigationListAction = (list: Array, activeIndex: num activeIndex, }) as const; +export const applySkeletonUpdateActionsFromServerAction = ( + actions: Array, +) => + ({ + type: "APPLY_SKELETON_UPDATE_ACTIONS_FROM_SERVER", + actions, + }) as const; + export const loadAgglomerateSkeletonAction = ( layerName: string, mappingName: string, diff --git a/frontend/javascripts/viewer/model/actions/skeletontracing_actions_with_effects.tsx b/frontend/javascripts/viewer/model/actions/skeletontracing_actions_with_effects.tsx new file mode 100644 index 00000000000..d6e66ea6e0c --- /dev/null +++ b/frontend/javascripts/viewer/model/actions/skeletontracing_actions_with_effects.tsx @@ -0,0 +1,90 @@ +import { Modal } from "antd"; +import renderIndependently from "libs/render_independently"; +import messages from "messages"; +import { + enforceSkeletonTracing, + getTree, + getTreeAndNode, +} from "viewer/model/accessors/skeletontracing_accessor"; +import type { WebknossosState } from "viewer/store"; +import Store from "viewer/store"; +import RemoveTreeModal from "viewer/view/remove_tree_modal"; +import { + type DeleteNodeAction, + type DeleteTreeAction, + type NoAction, + deleteNodeAction, + deleteTreeAction, + noAction, +} from "./skeletontracing_actions"; + +// The following functions are used as a direct response to a user action. +// The functions may interact with the Store which is why they are in a separate file +// (this avoids cyclic dependencies). +// The functions offer some additional logic which is sensible from a user-centered point of view. 
+// For example, the deleteNodeAsUserAction also initiates the deletion of a tree, +// when the current tree is empty. +// Ideally, this module should be refactored away (instead the logic should live in sagas). +export const deleteNodeAsUserAction = ( + state: WebknossosState, + nodeId?: number, + treeId?: number, +): DeleteNodeAction | NoAction | DeleteTreeAction => { + const skeletonTracing = enforceSkeletonTracing(state.annotation); + const treeAndNode = getTreeAndNode(skeletonTracing, nodeId, treeId); + + if (!treeAndNode) { + const tree = getTree(skeletonTracing, treeId); + if (!tree) return noAction(); + + // If the tree is empty, it will be deleted + return tree.nodes.size() === 0 ? deleteTreeAction(tree.treeId) : noAction(); + } + + const [tree, node] = treeAndNode; + + if (state.task != null && node.id === 1) { + // Let the user confirm the deletion of the initial node (node with id 1) of a task + Modal.confirm({ + title: messages["tracing.delete_initial_node"], + onOk: () => { + Store.dispatch(deleteNodeAction(node.id, tree.treeId)); + }, + }); + // As Modal.confirm is async, return noAction() and the modal will dispatch the real action + // if the user confirms + return noAction(); + } + + return deleteNodeAction(node.id, tree.treeId); +}; + +// Let the user confirm the deletion of the initial node (node with id 1) of a task +function confirmDeletingInitialNode(treeId: number) { + Modal.confirm({ + title: messages["tracing.delete_tree_with_initial_node"], + onOk: () => { + Store.dispatch(deleteTreeAction(treeId)); + }, + }); +} + +export const handleDeleteTreeByUser = (treeId?: number) => { + const state = Store.getState(); + const skeletonTracing = enforceSkeletonTracing(state.annotation); + const tree = getTree(skeletonTracing, treeId); + if (!tree) return; + + if (state.task != null && tree.nodes.has(1)) { + confirmDeletingInitialNode(tree.treeId); + } else if (state.userConfiguration.hideTreeRemovalWarning) { + 
Store.dispatch(deleteTreeAction(tree.treeId)); + } else { + renderIndependently((destroy) => ( + Store.dispatch(deleteTreeAction(tree.treeId))} + destroy={destroy} + /> + )); + } +}; diff --git a/frontend/javascripts/viewer/model/actions/volumetracing_actions.ts b/frontend/javascripts/viewer/model/actions/volumetracing_actions.ts index 5e86dc2615b..4d047e08ccc 100644 --- a/frontend/javascripts/viewer/model/actions/volumetracing_actions.ts +++ b/frontend/javascripts/viewer/model/actions/volumetracing_actions.ts @@ -8,6 +8,7 @@ import type { QuickSelectGeometry } from "viewer/geometries/helper_geometries"; import { AllUserBoundingBoxActions } from "viewer/model/actions/annotation_actions"; import type { NumberLike, Segment, SegmentGroup, SegmentMap } from "viewer/store"; import type BucketSnapshot from "../bucket_data_handling/bucket_snapshot"; +import type { ApplicableVolumeUpdateAction } from "../sagas/volume/update_actions"; export type InitializeVolumeTracingAction = ReturnType; export type InitializeEditableMappingAction = ReturnType; @@ -53,6 +54,9 @@ export type SetMappingIsLockedAction = ReturnType; +export type ApplyVolumeUpdateActionsFromServerAction = ReturnType< + typeof applyVolumeUpdateActionsFromServerAction +>; export type ComputeQuickSelectForRectAction = ReturnType; export type ComputeQuickSelectForPointAction = ReturnType; @@ -111,7 +115,8 @@ export type VolumeTracingAction = | CancelQuickSelectAction | ConfirmQuickSelectAction | SetVolumeBucketDataHasChangedAction - | BatchUpdateGroupsAndSegmentsAction; + | BatchUpdateGroupsAndSegmentsAction + | ApplyVolumeUpdateActionsFromServerAction; export const VolumeTracingSaveRelevantActions = [ "CREATE_CELL", @@ -471,3 +476,11 @@ export const setVolumeBucketDataHasChangedAction = (tracingId: string) => type: "SET_VOLUME_BUCKET_DATA_HAS_CHANGED", tracingId, }) as const; + +export const applyVolumeUpdateActionsFromServerAction = ( + actions: Array, +) => + ({ + type: 
"APPLY_VOLUME_UPDATE_ACTIONS_FROM_SERVER", + actions, + }) as const; diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/bounding_box.ts b/frontend/javascripts/viewer/model/bucket_data_handling/bounding_box.ts index 32de78b782b..24c4c01ed53 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/bounding_box.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/bounding_box.ts @@ -1,7 +1,8 @@ import { V3 } from "libs/mjs"; import { map3, mod } from "libs/utils"; import _ from "lodash"; -import type { BoundingBoxType, OrthoView, Vector2, Vector3, Vector4 } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { OrthoView, Vector2, Vector3, Vector4 } from "viewer/constants"; import constants from "viewer/constants"; import type { BoundingBoxObject } from "viewer/store"; import Dimensions from "../dimensions"; @@ -11,7 +12,7 @@ class BoundingBox { min: Vector3; max: Vector3; - constructor(boundingBox: BoundingBoxType | null | undefined) { + constructor(boundingBox: BoundingBoxMinMaxType | null | undefined) { if (boundingBox == null) { this.min = [Number.NEGATIVE_INFINITY, Number.NEGATIVE_INFINITY, Number.NEGATIVE_INFINITY]; this.max = [Number.POSITIVE_INFINITY, Number.POSITIVE_INFINITY, Number.POSITIVE_INFINITY]; @@ -249,7 +250,7 @@ class BoundingBox { return { topLeft: this.min, width: size[0], height: size[1], depth: size[2] }; } - toBoundingBoxType(): BoundingBoxType { + toBoundingBoxMinMaxType(): BoundingBoxMinMaxType { return { min: this.min, max: this.max, diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/bucket.ts b/frontend/javascripts/viewer/model/bucket_data_handling/bucket.ts index 9fd8bb45cd5..b38978d9fba 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/bucket.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/bucket.ts @@ -6,7 +6,8 @@ import { type Emitter, createNanoEvents } from "nanoevents"; import * as THREE from 
"three"; import type { BucketDataArray, ElementClass } from "types/api_types"; import type { AdditionalCoordinate } from "types/api_types"; -import type { BoundingBoxType, BucketAddress, Vector3 } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { BucketAddress, Vector3 } from "viewer/constants"; import Constants from "viewer/constants"; import type { MaybeUnmergedBucketLoadedPromise } from "viewer/model/actions/volumetracing_actions"; import { addBucketToUndoAction } from "viewer/model/actions/volumetracing_actions"; @@ -165,7 +166,7 @@ export class DataBucket { this.emitter.emit(event, ...args); } - getBoundingBox(): BoundingBoxType { + getBoundingBox(): BoundingBoxMinMaxType { const min = bucketPositionToGlobalAddress(this.zoomedAddress, this.cube.magInfo); const bucketMag = this.cube.magInfo.getMagByIndexOrThrow(this.zoomedAddress[3]); const max: Vector3 = [ @@ -624,7 +625,7 @@ export class DataBucket { expected: channelCount * Constants.BUCKET_SIZE, channelCount, }; - console.warn("bucket.data has unexpected length", debugInfo); + console.warn(`bucket.data for ${this.zoomedAddress} has unexpected length`, debugInfo); ErrorHandling.notify( new Error(`bucket.data has unexpected length. 
Details: ${JSON.stringify(debugInfo)}`), ); diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/data_cube.ts b/frontend/javascripts/viewer/model/bucket_data_handling/data_cube.ts index 47b833dce9e..343e610aaf7 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/data_cube.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/data_cube.ts @@ -14,13 +14,8 @@ import { type Emitter, createNanoEvents } from "nanoevents"; import * as THREE from "three"; import type { AdditionalAxis, BucketDataArray, ElementClass } from "types/api_types"; import type { AdditionalCoordinate } from "types/api_types"; -import type { - BoundingBoxType, - BucketAddress, - LabelMasksByBucketAndW, - Vector3, - Vector4, -} from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { BucketAddress, LabelMasksByBucketAndW, Vector3, Vector4 } from "viewer/constants"; import constants, { MappingStatusEnum } from "viewer/constants"; import Constants from "viewer/constants"; import { getMappingInfo } from "viewer/model/accessors/dataset_accessor"; @@ -414,7 +409,7 @@ class DataCube { } if (foundCollectibleBucket) { - this.collectBucket(this.buckets[this.bucketIterator]); + this.removeBucket(this.buckets[this.bucketIterator]); } else { const warnMessage = `More than ${this.buckets.length} buckets needed to be allocated.`; @@ -447,11 +442,11 @@ class DataCube { this.bucketIterator = (this.bucketIterator + 1) % (this.buckets.length + 1); } - collectAllBuckets(): void { - this.collectBucketsIf(() => true); + removeAllBuckets(): void { + this.removeBucketsIf(() => true); } - collectBucketsIf(predicateFn: (bucket: DataBucket) => boolean): void { + removeBucketsIf(predicateFn: (bucket: DataBucket) => boolean): void { // This method is always called in the context of reloading data. // All callers should ensure a saved state. 
This is encapsulated in the // api's reloadBuckets function that is used for most refresh-related @@ -484,7 +479,7 @@ class DataCube { false, ) ) { - this.collectBucket(bucket); + this.removeBucket(bucket); } else { notCollectedBuckets.push(bucket); } @@ -518,7 +513,7 @@ class DataCube { return valueSet; } - collectBucket(bucket: DataBucket): void { + removeBucket(bucket: DataBucket): void { const address = bucket.zoomedAddress; const [bucketIndex, cube] = this.getBucketIndexAndCube(address); @@ -570,7 +565,7 @@ class DataCube { additionalCoordinates: AdditionalCoordinate[] | null, segmentIdNumber: number, dimensionIndices: DimensionMap, - _floodfillBoundingBox: BoundingBoxType, + _floodfillBoundingBox: BoundingBoxMinMaxType, zoomStep: number, progressCallback: ProgressCallback, use3D: boolean, @@ -578,7 +573,7 @@ class DataCube { ): Promise<{ bucketsWithLabeledVoxelsMap: LabelMasksByBucketAndW; wasBoundingBoxExceeded: boolean; - coveredBoundingBox: BoundingBoxType; + coveredBoundingBox: BoundingBoxMinMaxType; }> { // This flood-fill algorithm works in two nested levels and uses a list of buckets to flood fill. 
// On the inner level a bucket is flood-filled and if the iteration of the buckets data diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/prefetch_strategy_arbitrary.ts b/frontend/javascripts/viewer/model/bucket_data_handling/prefetch_strategy_arbitrary.ts index e59ae18c365..3c1d91fe08f 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/prefetch_strategy_arbitrary.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/prefetch_strategy_arbitrary.ts @@ -1,7 +1,8 @@ import type { Matrix4x4 } from "libs/mjs"; import { M4x4, V3 } from "libs/mjs"; import type { AdditionalCoordinate } from "types/api_types"; -import type { BoundingBoxType, Vector3 } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { Vector3 } from "viewer/constants"; import PolyhedronRasterizer from "viewer/model/bucket_data_handling/polyhedron_rasterizer"; import { AbstractPrefetchStrategy } from "viewer/model/bucket_data_handling/prefetch_strategy_plane"; import type { PullQueueItem } from "viewer/model/bucket_data_handling/pullqueue"; @@ -25,11 +26,11 @@ export class PrefetchStrategyArbitrary extends AbstractPrefetchStrategy { ); getExtentObject( - poly0: BoundingBoxType, - poly1: BoundingBoxType, + poly0: BoundingBoxMinMaxType, + poly1: BoundingBoxMinMaxType, zoom0: number, zoom1: number, - ): BoundingBoxType { + ): BoundingBoxMinMaxType { return { min: [ Math.min(poly0.min[0] << zoom0, poly1.min[0] << zoom1), diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/viewer/model/bucket_data_handling/pushqueue.ts index 301b5310d39..d7eda22e56e 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/pushqueue.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/pushqueue.ts @@ -7,7 +7,7 @@ import { createCompressedUpdateBucketActions } from "viewer/model/bucket_data_ha import Store from "viewer/store"; import { escalateErrorAction } 
from "../actions/actions"; import { pushSaveQueueTransaction } from "../actions/save_actions"; -import type { UpdateActionWithoutIsolationRequirement } from "../sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "../sagas/volume/update_actions"; // Only process the PushQueue after there was no user interaction (or bucket modification due to // downsampling) for PUSH_DEBOUNCE_TIME milliseconds. diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts index d484c9b2e9b..84ee056fa4d 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts @@ -20,8 +20,8 @@ import { } from "viewer/model/accessors/volumetracing_accessor"; import type { DataBucket } from "viewer/model/bucket_data_handling/bucket"; import { bucketPositionToGlobalAddress } from "viewer/model/helpers/position_converter"; -import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/update_actions"; -import { updateBucket } from "viewer/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/volume/update_actions"; +import { updateBucket } from "viewer/model/sagas/volume/update_actions"; import type { DataLayerType, VolumeTracing } from "viewer/store"; import Store from "viewer/store"; import ByteArraysToLz4Base64Worker from "viewer/workers/byte_arrays_to_lz4_base64.worker"; diff --git a/frontend/javascripts/viewer/model/edge_collection.ts b/frontend/javascripts/viewer/model/edge_collection.ts index af757261397..6066bd9383e 100644 --- a/frontend/javascripts/viewer/model/edge_collection.ts +++ b/frontend/javascripts/viewer/model/edge_collection.ts @@ -89,10 +89,14 @@ export default class EdgeCollection implements NotEnumerableByObject { } map(fn: (value: Edge) => T): Array { - return 
this.asArray().map(fn); + return this.toArray().map(fn); } *all(): Generator { + yield* this.values(); + } + + *values(): Generator { for (const edgeArray of this.outMap.values()) { for (const edge of edgeArray) { yield edge; @@ -100,7 +104,7 @@ export default class EdgeCollection implements NotEnumerableByObject { } } - asArray(): Edge[] { + toArray(): Edge[] { return Array.from(this.all()); } diff --git a/frontend/javascripts/viewer/model/helpers/compaction/compact_save_queue.ts b/frontend/javascripts/viewer/model/helpers/compaction/compact_save_queue.ts index 2c7dc893150..55713c42a34 100644 --- a/frontend/javascripts/viewer/model/helpers/compaction/compact_save_queue.ts +++ b/frontend/javascripts/viewer/model/helpers/compaction/compact_save_queue.ts @@ -2,7 +2,7 @@ import _ from "lodash"; import type { UpdateUserBoundingBoxInSkeletonTracingAction, UpdateUserBoundingBoxInVolumeTracingAction, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import type { SaveQueueEntry } from "viewer/store"; function removeAllButLastUpdateActiveItemAndCameraAction( diff --git a/frontend/javascripts/viewer/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/viewer/model/helpers/compaction/compact_toggle_actions.ts index c7084ddaae2..9f012f54df8 100644 --- a/frontend/javascripts/viewer/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/viewer/model/helpers/compaction/compact_toggle_actions.ts @@ -8,13 +8,13 @@ import type { UpdateActionWithoutIsolationRequirement, UpdateSegmentVisibilityVolumeAction, UpdateTreeVisibilityUpdateAction, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import { updateSegmentGroupVisibilityVolumeAction, updateSegmentVisibilityVolumeAction, updateTreeGroupVisibility, updateTreeVisibility, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import type { Tree, 
TreeGroup, TreeMap } from "viewer/model/types/tree_types"; import type { Segment, SegmentMap, SkeletonTracing, VolumeTracing } from "viewer/store"; import { diff --git a/frontend/javascripts/viewer/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/viewer/model/helpers/compaction/compact_update_actions.ts index 03f442253e9..fc136d22245 100644 --- a/frontend/javascripts/viewer/model/helpers/compaction/compact_update_actions.ts +++ b/frontend/javascripts/viewer/model/helpers/compaction/compact_update_actions.ts @@ -8,8 +8,8 @@ import type { DeleteNodeUpdateAction, DeleteTreeUpdateAction, UpdateActionWithoutIsolationRequirement, -} from "viewer/model/sagas/update_actions"; -import { moveTreeComponent } from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; +import { moveTreeComponent, updateNode } from "viewer/model/sagas/volume/update_actions"; import type { SkeletonTracing, VolumeTracing } from "viewer/store"; // The Cantor pairing function assigns one natural number to each pair of natural numbers @@ -17,7 +17,11 @@ function cantor(a: number, b: number): number { return 0.5 * (a + b) * (a + b + 1) + b; } -function compactMovedNodesAndEdges(updateActions: Array) { +function compactMovedNodesAndEdges( + updateActions: Array, + prevTracing: SkeletonTracing | VolumeTracing, + tracing: SkeletonTracing | VolumeTracing, +) { // This function detects tree merges and splits. // It does so by identifying nodes and edges that were deleted in one tree only to be created // in another tree again afterwards. 
@@ -28,6 +32,10 @@ function compactMovedNodesAndEdges(updateActions: Array cantor(createUA.value.treeId, deleteUA.value.treeId), - ); + ) as Record< + number, + Array< + | [CreateNodeUpdateAction, DeleteNodeUpdateAction] + | [CreateEdgeUpdateAction, DeleteEdgeUpdateAction] + > + >; // Create a moveTreeComponent update action for each of the groups and insert it at the right spot for (const movedPairings of _.values(groupedMovedNodesAndEdges)) { const actionTracingId = movedPairings[0][1].value.actionTracingId; const oldTreeId = movedPairings[0][1].value.treeId; const newTreeId = movedPairings[0][0].value.treeId; - // This could be done with a .filter(...).map(...), but flow cannot comprehend that - const nodeIds = movedPairings.reduce((agg: number[], [createUA]) => { - if (createUA.name === "createNode") agg.push(createUA.value.id); - return agg; - }, []); + const nodeIds = movedPairings + .filter( + (tuple): tuple is [CreateNodeUpdateAction, DeleteNodeUpdateAction] => + tuple[0].name === "createNode", + ) + .map(([createUA]) => createUA.value.id); + // The moveTreeComponent update action needs to be placed: // BEFORE the possible deleteTree update action of the oldTreeId and // AFTER the possible createTree update action of the newTreeId @@ -96,6 +112,8 @@ function compactMovedNodesAndEdges(updateActions: Array ua.name === "createTree" && ua.value.id === newTreeId, ); + const moveAction = moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId); + if (deleteTreeUAIndex > -1 && createTreeUAIndex > -1) { // This should not happen, but in case it does, the moveTreeComponent update action // cannot be inserted as the createTreeUA is after the deleteTreeUA @@ -103,29 +121,35 @@ function compactMovedNodesAndEdges(updateActions: Array -1) { // Insert after the createTreeUA - compactedActions.splice( - createTreeUAIndex + 1, - 0, - moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId), - ); + compactedActions.splice(createTreeUAIndex + 1, 0, 
moveAction); } else if (deleteTreeUAIndex > -1) { // Insert before the deleteTreeUA - compactedActions.splice( - deleteTreeUAIndex, - 0, - moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId), - ); + compactedActions.splice(deleteTreeUAIndex, 0, moveAction); } else { // Insert in front compactedActions.unshift(moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId)); } + // Add updateNode actions if node was changed (by reference) + for (const [createUA, deleteUA] of movedPairings) { + if (createUA.name === "createNode" && deleteUA.name === "deleteNode") { + const nodeId = createUA.value.id; + const newNode = tracing.trees.getNullable(newTreeId)?.nodes.getNullable(nodeId); + const oldNode = prevTracing.trees.getNullable(oldTreeId)?.nodes.getNullable(nodeId); + + if (newNode !== oldNode && newNode != null) { + compactedActions.push(updateNode(newTreeId, newNode, actionTracingId)); + } + } + } + // Remove the original create/delete update actions of the moved nodes and edges. 
type CreateOrDeleteNodeOrEdge = | CreateNodeUpdateAction | DeleteNodeUpdateAction | CreateEdgeUpdateAction | DeleteEdgeUpdateAction; + compactedActions = withoutValues( compactedActions, // Cast movedPairs type to satisfy _.flatten @@ -157,10 +181,11 @@ function compactDeletedTrees(updateActions: Array, + prevTracing: SkeletonTracing | VolumeTracing, tracing: SkeletonTracing | VolumeTracing, ): Array { return compactToggleActions( - compactDeletedTrees(compactMovedNodesAndEdges(updateActions)), + compactDeletedTrees(compactMovedNodesAndEdges(updateActions, prevTracing, tracing)), tracing, ); } diff --git a/frontend/javascripts/viewer/model/helpers/diff_helpers.ts b/frontend/javascripts/viewer/model/helpers/diff_helpers.ts index 8293ccf977a..e3edd2da2d9 100644 --- a/frontend/javascripts/viewer/model/helpers/diff_helpers.ts +++ b/frontend/javascripts/viewer/model/helpers/diff_helpers.ts @@ -10,7 +10,7 @@ import { updateUserBoundingBoxInVolumeTracing, updateUserBoundingBoxVisibilityInSkeletonTracing, updateUserBoundingBoxVisibilityInVolumeTracing, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import type { UserBoundingBox } from "viewer/store"; import type { TreeGroup } from "../types/tree_types"; diff --git a/frontend/javascripts/viewer/model/helpers/nml_helpers.ts b/frontend/javascripts/viewer/model/helpers/nml_helpers.ts index 1ee85911ae6..3b6f3656aaa 100644 --- a/frontend/javascripts/viewer/model/helpers/nml_helpers.ts +++ b/frontend/javascripts/viewer/model/helpers/nml_helpers.ts @@ -8,13 +8,8 @@ import messages from "messages"; import Saxophone from "saxophone"; import type { APIBuildInfoWk, MetadataEntryProto } from "types/api_types"; import type { AdditionalCoordinate } from "types/api_types"; -import { - type BoundingBoxType, - IdentityTransform, - type TreeType, - TreeTypeEnum, - type Vector3, -} from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import { 
IdentityTransform, type TreeType, TreeTypeEnum, type Vector3 } from "viewer/constants"; import Constants from "viewer/constants"; import { getPosition, getRotation } from "viewer/model/accessors/flycam_accessor"; import EdgeCollection from "viewer/model/edge_collection"; @@ -201,7 +196,7 @@ function serializeMetaInformation( } function serializeTaskBoundingBox( - boundingBox: BoundingBoxType | null | undefined, + boundingBox: BoundingBoxMinMaxType | null | undefined, tagName: string, ): string { if (boundingBox) { @@ -1120,6 +1115,7 @@ export function parseNml(nmlString: string): Promise<{ case "volume": { isParsingVolumeTag = true; containedVolumes = true; + break; } default: @@ -1172,6 +1168,7 @@ export function parseNml(nmlString: string): Promise<{ case "volume": { isParsingVolumeTag = false; + break; } default: diff --git a/frontend/javascripts/viewer/model/helpers/position_converter.ts b/frontend/javascripts/viewer/model/helpers/position_converter.ts index 2162345de6d..c1fd8123872 100644 --- a/frontend/javascripts/viewer/model/helpers/position_converter.ts +++ b/frontend/javascripts/viewer/model/helpers/position_converter.ts @@ -18,6 +18,22 @@ export function globalPositionToBucketPosition( additionalCoordinates || [], ]; } + +export function globalPositionToBucketPositionWithMag( + [x, y, z]: Vector3, + mag: Vector3, + additionalCoordinates: AdditionalCoordinate[] | null | undefined, +): BucketAddress { + const magIndex = Math.log2(Math.max(...mag)); + return [ + Math.floor(x / (constants.BUCKET_WIDTH * mag[0])), + Math.floor(y / (constants.BUCKET_WIDTH * mag[1])), + Math.floor(z / (constants.BUCKET_WIDTH * mag[2])), + magIndex, + additionalCoordinates || [], + ]; +} + export function scaleGlobalPositionWithMagnification( [x, y, z]: Vector3, mag: Vector3, diff --git a/frontend/javascripts/viewer/model/reducers/annotation_reducer.ts b/frontend/javascripts/viewer/model/reducers/annotation_reducer.ts index 40ae372612e..d23f5a4aae5 100644 --- 
a/frontend/javascripts/viewer/model/reducers/annotation_reducer.ts +++ b/frontend/javascripts/viewer/model/reducers/annotation_reducer.ts @@ -137,6 +137,13 @@ function AnnotationReducer(state: WebknossosState, action: Action): WebknossosSt }); } + case "SET_IS_MUTEX_ACQUIRED": { + const { isMutexAcquired } = action; + return updateKey(state, "annotation", { + isMutexAcquired, + }); + } + case "SET_USER_BOUNDING_BOXES": { return updateUserBoundingBoxes(state, action.userBoundingBoxes); } @@ -151,8 +158,6 @@ function AnnotationReducer(state: WebknossosState, action: Action): WebknossosSt const updatedUserBoundingBoxes = tracing.userBoundingBoxes.map((bbox) => bbox.id === action.id ? { - // @ts-expect-error ts-migrate(2783) FIXME: 'id' is specified more than once, so this usage wi... Remove this comment to see the full error message - id: bbox.id, ...bbox, ...action.newProps, } @@ -210,7 +215,7 @@ function AnnotationReducer(state: WebknossosState, action: Action): WebknossosSt // Only update the bounding box if the bounding box overlaps with the dataset bounds. // Else the bounding box is completely outside the dataset bounds -> in that case just keep the bounding box and let the user cook. 
if (newBoundingBoxWithinDataset.getVolume() > 0) { - newUserBoundingBox.boundingBox = newBoundingBoxWithinDataset.toBoundingBoxType(); + newUserBoundingBox.boundingBox = newBoundingBoxWithinDataset.toBoundingBoxMinMaxType(); } const updatedUserBoundingBoxes = [...userBoundingBoxes, newUserBoundingBox]; diff --git a/frontend/javascripts/viewer/model/reducers/reducer_helpers.ts b/frontend/javascripts/viewer/model/reducers/reducer_helpers.ts index a54ad44821d..c75e588aeb7 100644 --- a/frontend/javascripts/viewer/model/reducers/reducer_helpers.ts +++ b/frontend/javascripts/viewer/model/reducers/reducer_helpers.ts @@ -2,13 +2,14 @@ import * as Utils from "libs/utils"; import type { APIAnnotation, AdditionalAxis, - ServerAdditionalAxis, - ServerBoundingBox, + AdditionalAxisProto, + BoundingBoxProto, SkeletonUserState, - UserBoundingBoxFromServer, + UserBoundingBoxProto, VolumeUserState, } from "types/api_types"; -import type { BoundingBoxType } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { Vector3 } from "viewer/constants"; import type { AnnotationTool, AnnotationToolId } from "viewer/model/accessors/tool_accessor"; import { Toolkits } from "viewer/model/accessors/tool_accessor"; import { updateKey } from "viewer/model/helpers/deep_update"; @@ -17,41 +18,62 @@ import type { BoundingBoxObject, SegmentGroup, UserBoundingBox, - UserBoundingBoxToServer, + UserBoundingBoxForServer, UserBoundingBoxWithOptIsVisible, WebknossosState, } from "viewer/store"; import { type DisabledInfo, getDisabledInfoForTools } from "../accessors/disabled_tool_accessor"; +import type { UpdateUserBoundingBoxInSkeletonTracingAction } from "../sagas/volume/update_actions"; import type { Tree, TreeGroup } from "../types/tree_types"; -export function convertServerBoundingBoxToBoundingBox( - boundingBox: ServerBoundingBox, -): BoundingBoxType { - return Utils.computeBoundingBoxFromArray( - 
Utils.concatVector3(Utils.point3ToVector3(boundingBox.topLeft), [ - boundingBox.width, - boundingBox.height, - boundingBox.depth, - ]), - ); +function convertServerBoundingBoxToBoundingBoxMinMaxType( + boundingBox: BoundingBoxProto, +): BoundingBoxMinMaxType { + const min = Utils.point3ToVector3(boundingBox.topLeft); + const max: Vector3 = [ + min[0] + boundingBox.width, + min[1] + boundingBox.height, + min[2] + boundingBox.depth, + ]; + return { min, max }; } export function convertServerBoundingBoxToFrontend( - boundingBox: ServerBoundingBox | null | undefined, -): BoundingBoxType | null | undefined { - if (!boundingBox) return null; - return convertServerBoundingBoxToBoundingBox(boundingBox); + boundingBox: BoundingBoxProto | null | undefined, +): BoundingBoxMinMaxType | null | undefined { + if (!boundingBox) return boundingBox; + return convertServerBoundingBoxToBoundingBoxMinMaxType(boundingBox); +} + +export function convertUserBoundingBoxFromUpdateActionToFrontend( + bboxValue: UpdateUserBoundingBoxInSkeletonTracingAction["value"], +): Partial { + const { + boundingBox, + boundingBoxId: _boundingBoxId, + actionTracingId: _actionTracingId, + ...valueWithoutBoundingBox + } = bboxValue; + const maybeBoundingBoxValue = + boundingBox != null + ? { boundingBox: Utils.computeBoundingBoxFromBoundingBoxObject(boundingBox) } + : {}; + + return { + ...valueWithoutBoundingBox, + ...maybeBoundingBoxValue, + }; } export function convertUserBoundingBoxesFromServerToFrontend( - boundingBoxes: Array, + boundingBoxes: Array, userState: SkeletonUserState | VolumeUserState | undefined, ): Array { const idToVisible = userState ? 
Utils.mapEntriesToMap(userState.boundingBoxVisibilities) : {}; return boundingBoxes.map((bb) => { const { color, id, name, isVisible, boundingBox } = bb; - const convertedBoundingBox = convertServerBoundingBoxToBoundingBox(boundingBox); + const convertedBoundingBox = convertServerBoundingBoxToBoundingBoxMinMaxType(boundingBox); return { boundingBox: convertedBoundingBox, color: color ? Utils.colorObjectToRGBArray(color) : Utils.getRandomColor(), @@ -64,13 +86,13 @@ export function convertUserBoundingBoxesFromServerToFrontend( export function convertUserBoundingBoxFromFrontendToServer( boundingBox: UserBoundingBoxWithOptIsVisible, -): UserBoundingBoxToServer { +): UserBoundingBoxForServer { const { boundingBox: bb, ...rest } = boundingBox; return { ...rest, boundingBox: Utils.computeBoundingBoxObjectFromBoundingBox(bb) }; } export function convertFrontendBoundingBoxToServer( - boundingBox: BoundingBoxType, + boundingBox: BoundingBoxMinMaxType, ): BoundingBoxObject { return { topLeft: boundingBox.min, @@ -80,7 +102,7 @@ export function convertFrontendBoundingBoxToServer( }; } -export function convertPointToVecInBoundingBox(boundingBox: ServerBoundingBox): BoundingBoxObject { +export function convertBoundingBoxProtoToObject(boundingBox: BoundingBoxProto): BoundingBoxObject { return { width: boundingBox.width, height: boundingBox.height, @@ -134,11 +156,12 @@ export function convertServerAnnotationToFrontendAnnotation( othersMayEdit, annotationLayers, blockedByUser: null, + isMutexAcquired: false, }; } export function convertServerAdditionalAxesToFrontEnd( - additionalAxes: ServerAdditionalAxis[], + additionalAxes: AdditionalAxisProto[], ): AdditionalAxis[] { return additionalAxes.map((coords) => ({ ...coords, @@ -210,6 +233,7 @@ export function setToolReducer(state: WebknossosState, tool: AnnotationTool) { const disabledToolInfo = getDisabledInfoForTools(state); if (!isToolAvailable(state, disabledToolInfo, tool)) { + console.log(`Cannot switch to ${tool.readableName} 
because it's not available.`); return state; } diff --git a/frontend/javascripts/viewer/model/reducers/save_reducer.ts b/frontend/javascripts/viewer/model/reducers/save_reducer.ts index 29c3e097c83..0c9a07de4d6 100644 --- a/frontend/javascripts/viewer/model/reducers/save_reducer.ts +++ b/frontend/javascripts/viewer/model/reducers/save_reducer.ts @@ -5,7 +5,7 @@ import { type TracingStats, getStats } from "viewer/model/accessors/annotation_a import type { Action } from "viewer/model/actions/actions"; import { getActionLog } from "viewer/model/helpers/action_logger_middleware"; import { updateKey, updateKey2 } from "viewer/model/helpers/deep_update"; -import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "viewer/model/sagas/save_saga_constants"; +import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "viewer/model/sagas/saving/save_saga_constants"; import type { SaveState, WebknossosState } from "viewer/store"; // These update actions are not idempotent. Having them diff --git a/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer.ts b/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer.ts index 00226b96480..dfca78bd3b1 100644 --- a/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer.ts +++ b/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer.ts @@ -6,7 +6,7 @@ import * as Utils from "libs/utils"; import _ from "lodash"; import type { MetadataEntryProto } from "types/api_types"; import { userSettings } from "types/schemas/user_settings.schema"; -import Constants, { TreeTypeEnum } from "viewer/constants"; +import { TreeTypeEnum } from "viewer/constants"; import { areGeometriesTransformed, findTreeByNodeId, @@ -36,6 +36,7 @@ import { deleteNode, deleteTrees, ensureTreeNames, + getMaximumNodeId, getOrCreateTree, mergeTrees, removeMissingGroupsFromTrees, @@ -54,8 +55,13 @@ import { } from "viewer/view/right-border-tabs/trees_tab/tree_hierarchy_view_helpers"; import { getUserStateForTracing } from 
"../accessors/annotation_accessor"; import { max, maxBy } from "../helpers/iterator_utils"; +import { applySkeletonUpdateActionsFromServer } from "./update_action_application/skeleton"; -function SkeletonTracingReducer(state: WebknossosState, action: Action): WebknossosState { +function SkeletonTracingReducer( + state: WebknossosState, + action: Action, + ignoreAllowUpdate: boolean = false, +): WebknossosState { if (action.type === "INITIALIZE_SKELETONTRACING") { const userState = getUserStateForTracing( action.tracing, @@ -68,9 +74,7 @@ function SkeletonTracingReducer(state: WebknossosState, action: Action): Webknos let activeNodeId = userState?.activeNodeId ?? action.tracing.activeNodeId; const treeGroups = applyUserStateToGroups(action.tracing.treeGroups || [], userState); - let cachedMaxNodeId = max(trees.values().flatMap((__) => __.nodes.map((node) => node.id))); - - cachedMaxNodeId = cachedMaxNodeId != null ? cachedMaxNodeId : Constants.MIN_NODE_ID - 1; + const cachedMaxNodeId = getMaximumNodeId(trees); let activeTreeId = null; @@ -134,6 +138,9 @@ function SkeletonTracingReducer(state: WebknossosState, action: Action): Webknos skeleton: { $set: skeletonTracing, }, + readOnly: { + $set: null, + }, }, }); } @@ -150,6 +157,25 @@ function SkeletonTracingReducer(state: WebknossosState, action: Action): Webknos switch (action.type) { case "SET_ACTIVE_NODE": { const { nodeId } = action; + + if (nodeId == null) { + return update(state, { + annotation: { + skeleton: { + activeNodeId: { + $set: null, + }, + activeTreeId: { + $set: null, + }, + activeGroupId: { + $set: null, + }, + }, + }, + }); + } + const tree = findTreeByNodeId(skeletonTracing.trees, nodeId); if (tree) { return update(state, { @@ -629,6 +655,17 @@ function SkeletonTracingReducer(state: WebknossosState, action: Action): Webknos }); } + case "APPLY_SKELETON_UPDATE_ACTIONS_FROM_SERVER": { + const { actions } = action; + return applySkeletonUpdateActionsFromServer( + // Pass a SkeletonTracingReducer that 
ignores allowUpdate because + // we want to be able to apply updates even in read-only views. + (state: WebknossosState, action: Action) => SkeletonTracingReducer(state, action, true), + actions, + state, + ); + } + default: // pass } @@ -637,7 +674,9 @@ function SkeletonTracingReducer(state: WebknossosState, action: Action): Webknos */ const { restrictions } = state.annotation; const { allowUpdate } = restrictions; - if (!allowUpdate) return state; + if (!(allowUpdate || ignoreAllowUpdate)) { + return state; + } switch (action.type) { case "CREATE_NODE": { @@ -645,6 +684,7 @@ function SkeletonTracingReducer(state: WebknossosState, action: Action): Webknos // Don't create nodes if the skeleton layer is rendered with transforms. return state; } + const { position, rotation, viewport, mag, treeId, timestamp, additionalCoordinates } = action; const tree = getOrCreateTree(state, skeletonTracing, treeId, timestamp, TreeTypeEnum.DEFAULT); diff --git a/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer_helpers.ts b/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer_helpers.ts index e93ede8f04b..f0ed93ea6c3 100644 --- a/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer_helpers.ts +++ b/frontend/javascripts/viewer/model/reducers/skeletontracing_reducer_helpers.ts @@ -707,7 +707,7 @@ export function mergeTrees( const updatedSourceTree: Tree = { ...sourceTree, nodes: newNodes, - edges: sourceTree.edges.addEdges(targetTree.edges.asArray().concat(newEdge)), + edges: sourceTree.edges.addEdges(targetTree.edges.toArray().concat(newEdge)), comments: sourceTree.comments.concat(targetTree.comments), branchPoints: sourceTree.branchPoints.concat(targetTree.branchPoints), }; diff --git a/frontend/javascripts/viewer/model/reducers/update_action_application/bounding_box.ts b/frontend/javascripts/viewer/model/reducers/update_action_application/bounding_box.ts new file mode 100644 index 00000000000..ee920f7a245 --- /dev/null +++ 
b/frontend/javascripts/viewer/model/reducers/update_action_application/bounding_box.ts @@ -0,0 +1,99 @@ +import update from "immutability-helper"; +import * as Utils from "libs/utils"; +import type { + AddUserBoundingBoxInSkeletonTracingAction, + AddUserBoundingBoxInVolumeTracingAction, + DeleteUserBoundingBoxInSkeletonTracingAction, + DeleteUserBoundingBoxInVolumeTracingAction, + UpdateUserBoundingBoxInSkeletonTracingAction, + UpdateUserBoundingBoxInVolumeTracingAction, +} from "viewer/model/sagas/volume/update_actions"; +import type { + SkeletonTracing, + UserBoundingBox, + VolumeTracing, + WebknossosState, +} from "viewer/store"; +import { convertUserBoundingBoxFromUpdateActionToFrontend } from "../reducer_helpers"; + +export function applyUpdateUserBoundingBox( + newState: WebknossosState, + tracing: SkeletonTracing | VolumeTracing, + ua: UpdateUserBoundingBoxInSkeletonTracingAction | UpdateUserBoundingBoxInVolumeTracingAction, +) { + const updatedUserBoundingBoxes = tracing.userBoundingBoxes.map( + (bbox): UserBoundingBox => + bbox.id === ua.value.boundingBoxId + ? { ...bbox, ...convertUserBoundingBoxFromUpdateActionToFrontend(ua.value) } + : bbox, + ); + + return handleUserBoundingBoxUpdateInTracing(newState, tracing, updatedUserBoundingBoxes); +} + +export function applyAddUserBoundingBox( + newState: WebknossosState, + tracing: SkeletonTracing | VolumeTracing, + ua: AddUserBoundingBoxInSkeletonTracingAction | AddUserBoundingBoxInVolumeTracingAction, +) { + const { boundingBox, ...valueWithoutBoundingBox } = ua.value.boundingBox; + const boundingBoxValue = { + boundingBox: Utils.computeBoundingBoxFromBoundingBoxObject(boundingBox), + }; + const newUserBBox: UserBoundingBox = { + // The visibility is stored per user. Therefore, we default to true here. 
+ isVisible: true, + ...valueWithoutBoundingBox, + ...boundingBoxValue, + }; + const updatedUserBoundingBoxes = tracing.userBoundingBoxes.concat([newUserBBox]); + + return handleUserBoundingBoxUpdateInTracing(newState, tracing, updatedUserBoundingBoxes); +} + +export function applyDeleteUserBoundingBox( + newState: WebknossosState, + tracing: SkeletonTracing | VolumeTracing, + ua: DeleteUserBoundingBoxInSkeletonTracingAction | DeleteUserBoundingBoxInVolumeTracingAction, +) { + const updatedUserBoundingBoxes = tracing.userBoundingBoxes.filter( + (bbox) => bbox.id !== ua.value.boundingBoxId, + ); + + return handleUserBoundingBoxUpdateInTracing(newState, tracing, updatedUserBoundingBoxes); +} + +function handleUserBoundingBoxUpdateInTracing( + state: WebknossosState, + tracing: SkeletonTracing | VolumeTracing, + updatedUserBoundingBoxes: UserBoundingBox[], +) { + if (tracing.type === "skeleton") { + return update(state, { + annotation: { + skeleton: { + userBoundingBoxes: { + $set: updatedUserBoundingBoxes, + }, + }, + }, + }); + } + + const newVolumes = state.annotation.volumes.map((volumeTracing) => + tracing.tracingId === volumeTracing.tracingId + ? 
{ + ...volumeTracing, + userBoundingBoxes: updatedUserBoundingBoxes, + } + : volumeTracing, + ); + + return update(state, { + annotation: { + volumes: { + $set: newVolumes, + }, + }, + }); +} diff --git a/frontend/javascripts/viewer/model/reducers/update_action_application/skeleton.ts b/frontend/javascripts/viewer/model/reducers/update_action_application/skeleton.ts new file mode 100644 index 00000000000..039224a3137 --- /dev/null +++ b/frontend/javascripts/viewer/model/reducers/update_action_application/skeleton.ts @@ -0,0 +1,356 @@ +import update from "immutability-helper"; +import DiffableMap from "libs/diffable_map"; +import { enforceSkeletonTracing, getTree } from "viewer/model/accessors/skeletontracing_accessor"; +import { + setTreeEdgeVisibilityAction, + setTreeGroupsAction, +} from "viewer/model/actions/skeletontracing_actions"; +import EdgeCollection from "viewer/model/edge_collection"; +import type { ApplicableSkeletonUpdateAction } from "viewer/model/sagas/volume/update_actions"; +import type { Tree } from "viewer/model/types/tree_types"; +import type { Reducer, WebknossosState } from "viewer/store"; +import { getMaximumNodeId } from "../skeletontracing_reducer_helpers"; +import { + applyAddUserBoundingBox, + applyDeleteUserBoundingBox, + applyUpdateUserBoundingBox, +} from "./bounding_box"; + +export function applySkeletonUpdateActionsFromServer( + SkeletonTracingReducer: Reducer, + actions: ApplicableSkeletonUpdateAction[], + state: WebknossosState, +): WebknossosState { + let newState = state; + for (const ua of actions) { + newState = applySingleAction(SkeletonTracingReducer, ua, newState); + } + + return newState; +} + +function applySingleAction( + SkeletonTracingReducer: Reducer, + ua: ApplicableSkeletonUpdateAction, + state: WebknossosState, +): WebknossosState { + switch (ua.name) { + case "createTree": { + // updatedId is part of the updateAction format but was never really used. 
+ const { id, updatedId: _updatedId, actionTracingId: _actionTracingId, ...rest } = ua.value; + const newTree: Tree = { + treeId: id, + ...rest, + nodes: new DiffableMap(), + edges: new EdgeCollection(), + }; + const newTrees = enforceSkeletonTracing(state.annotation).trees.set(id, newTree); + + return update(state, { + annotation: { + skeleton: { + trees: { + $set: newTrees, + }, + }, + }, + }); + } + case "updateTree": { + const { + id: treeId, + actionTracingId: _actionTracingId, + // updatedId is part of the updateAction format but was never really used. + updatedId: _updatedId, + ...treeRest + } = ua.value; + const skeleton = enforceSkeletonTracing(state.annotation); + const tree = getTree(skeleton, treeId); + if (tree == null) { + throw new Error("Could not create node because tree was not found."); + } + const newTree = { ...tree, ...treeRest }; + const newTrees = skeleton.trees.set(newTree.treeId, newTree); + return update(state, { + annotation: { + skeleton: { + trees: { + $set: newTrees, + }, + }, + }, + }); + } + case "createNode": { + const { treeId, ...serverNode } = ua.value; + const { + position: untransformedPosition, + resolution: mag, + actionTracingId: _actionTracingId, + ...node + } = serverNode; + const clientNode = { untransformedPosition, mag, ...node }; + + const skeleton = enforceSkeletonTracing(state.annotation); + const tree = getTree(skeleton, treeId); + if (tree == null) { + throw new Error("Could not create node because tree was not found."); + } + const diffableNodeMap = tree.nodes; + const newDiffableMap = diffableNodeMap.set(node.id, clientNode); + const newTree = update(tree, { + nodes: { $set: newDiffableMap }, + }); + const newTrees = skeleton.trees.set(newTree.treeId, newTree); + + return update(state, { + annotation: { + skeleton: { + trees: { + $set: newTrees, + }, + cachedMaxNodeId: { $set: getMaximumNodeId(newTrees) }, + }, + }, + }); + } + case "updateNode": { + const { treeId, ...serverNode } = ua.value; + const { + 
position: untransformedPosition, + actionTracingId: _actionTracingId, + mag, + ...node + } = serverNode; + const clientNode = { untransformedPosition, mag, ...node }; + + const skeleton = enforceSkeletonTracing(state.annotation); + const tree = getTree(skeleton, treeId); + if (tree == null) { + throw new Error("Could not update node because tree was not found."); + } + const diffableNodeMap = tree.nodes; + const newDiffableMap = diffableNodeMap.set(node.id, clientNode); + const newTree = update(tree, { + nodes: { $set: newDiffableMap }, + }); + const newTrees = skeleton.trees.set(newTree.treeId, newTree); + + return update(state, { + annotation: { + skeleton: { + trees: { + $set: newTrees, + }, + }, + }, + }); + } + case "deleteTree": { + const { id } = ua.value; + const skeleton = enforceSkeletonTracing(state.annotation); + const updatedTrees = skeleton.trees.delete(id); + + const newActiveTreeId = skeleton.activeTreeId === id ? null : skeleton.activeTreeId; + + return update(state, { + annotation: { + skeleton: { + trees: { $set: updatedTrees }, + cachedMaxNodeId: { $set: getMaximumNodeId(updatedTrees) }, + activeTreeId: { $set: newActiveTreeId }, + }, + }, + }); + } + case "moveTreeComponent": { + // Use the _ prefix to ensure that the following code rather + // uses the nodeIdSet. 
+ const { nodeIds: _nodeIds, sourceId, targetId } = ua.value; + const nodeIdSet = new Set(_nodeIds); + + const skeleton = enforceSkeletonTracing(state.annotation); + const sourceTree = getTree(skeleton, sourceId); + const targetTree = getTree(skeleton, targetId); + + if (!sourceTree || !targetTree) { + throw new Error("Source or target tree not found."); + } + + // Separate moved and remaining nodes + const movedNodeEntries = sourceTree.nodes + .entries() + .filter(([id]) => nodeIdSet.has(id)) + .toArray(); + const remainingNodeEntries = sourceTree.nodes + .entries() + .filter(([id]) => !nodeIdSet.has(id)) + .toArray(); + + // Separate moved and remaining edges + const movedEdges = sourceTree.edges + .toArray() + .filter((e) => nodeIdSet.has(e.source) && nodeIdSet.has(e.target)); + const remainingEdges = sourceTree.edges + .toArray() + .filter((e) => !(nodeIdSet.has(e.source) && nodeIdSet.has(e.target))); + + // Create updated source tree + const updatedSourceTree = { + ...sourceTree, + nodes: new DiffableMap(remainingNodeEntries), + edges: new EdgeCollection().addEdges(remainingEdges), + }; + + // Create updated target tree + const updatedTargetNodes = targetTree.nodes.clone(); + for (const [id, node] of movedNodeEntries) { + updatedTargetNodes.mutableSet(id, node); + } + + const updatedTargetEdges = targetTree.edges.clone().addEdges(movedEdges, true); + + const updatedTargetTree = { + ...targetTree, + nodes: updatedTargetNodes, + edges: updatedTargetEdges, + }; + + const updatedTrees = skeleton.trees + .set(sourceId, updatedSourceTree) + .set(targetId, updatedTargetTree); + + return update(state, { + annotation: { + skeleton: { + trees: { $set: updatedTrees }, + }, + }, + }); + } + case "createEdge": { + const { treeId, source, target } = ua.value; + // eslint-disable-next-line no-loop-func + if (state.annotation.skeleton == null) { + throw new Error("Could not apply update action because no skeleton exists."); + } + + const tree = 
getTree(state.annotation.skeleton, treeId); + if (tree == null) { + throw new Error( + `Could not apply update action because tree with id=${treeId} was not found.`, + ); + } + const newEdge = { + source, + target, + }; + const edges = tree.edges.addEdge(newEdge); + const newTree = update(tree, { edges: { $set: edges } }); + const newTrees = state.annotation.skeleton.trees.set(tree.treeId, newTree); + + return update(state, { + annotation: { + skeleton: { + trees: { + $set: newTrees, + }, + }, + }, + }); + } + case "deleteEdge": { + const { treeId, source, target } = ua.value; + + const skeleton = enforceSkeletonTracing(state.annotation); + const tree = getTree(skeleton, treeId); + + if (!tree) { + throw new Error("Source or target tree not found."); + } + + const updatedTree = { + ...tree, + edges: tree.edges.removeEdge({ source, target }), + }; + + const updatedTrees = skeleton.trees.set(treeId, updatedTree); + + return update(state, { + annotation: { + skeleton: { + trees: { $set: updatedTrees }, + }, + }, + }); + } + + case "deleteNode": { + const { treeId, nodeId } = ua.value; + + const skeleton = enforceSkeletonTracing(state.annotation); + const tree = getTree(skeleton, treeId); + + if (!tree) { + throw new Error("Source or target tree not found."); + } + + const updatedTree = { + ...tree, + nodes: tree.nodes.delete(nodeId), + }; + + const updatedTrees = skeleton.trees.set(treeId, updatedTree); + + const newActiveNodeId = skeleton.activeNodeId === nodeId ? 
null : skeleton.activeNodeId; + + return update(state, { + annotation: { + skeleton: { + trees: { $set: updatedTrees }, + cachedMaxNodeId: { $set: getMaximumNodeId(updatedTrees) }, + activeNodeId: { $set: newActiveNodeId }, + }, + }, + }); + } + + case "updateTreeGroups": { + return SkeletonTracingReducer(state, setTreeGroupsAction(ua.value.treeGroups)); + } + + case "updateTreeGroupsExpandedState": { + // changes to user specific state does not need to be reacted to + return state; + } + + case "updateTreeEdgesVisibility": { + return SkeletonTracingReducer( + state, + setTreeEdgeVisibilityAction(ua.value.treeId, ua.value.edgesAreVisible), + ); + } + + case "updateUserBoundingBoxInSkeletonTracing": { + return applyUpdateUserBoundingBox(state, enforceSkeletonTracing(state.annotation), ua); + } + case "addUserBoundingBoxInSkeletonTracing": { + return applyAddUserBoundingBox(state, enforceSkeletonTracing(state.annotation), ua); + } + case "updateUserBoundingBoxVisibilityInSkeletonTracing": { + // Visibility updates are user-specific and don't need to be + // incorporated for the current user. + return state; + } + case "deleteUserBoundingBoxInSkeletonTracing": { + return applyDeleteUserBoundingBox(state, enforceSkeletonTracing(state.annotation), ua); + } + default: { + ua satisfies never; + } + } + ua satisfies never; + + // Satisfy TS. 
+ throw new Error("Reached unexpected part of function."); +} diff --git a/frontend/javascripts/viewer/model/reducers/update_action_application/volume.ts b/frontend/javascripts/viewer/model/reducers/update_action_application/volume.ts new file mode 100644 index 00000000000..a9a86df1e64 --- /dev/null +++ b/frontend/javascripts/viewer/model/reducers/update_action_application/volume.ts @@ -0,0 +1,107 @@ +import { getVolumeTracingById } from "viewer/model/accessors/volumetracing_accessor"; +import { + removeSegmentAction, + setSegmentGroupsAction, + updateSegmentAction, +} from "viewer/model/actions/volumetracing_actions"; +import type { ApplicableVolumeUpdateAction } from "viewer/model/sagas/volume/update_actions"; +import type { Segment, WebknossosState } from "viewer/store"; +import type { VolumeTracingReducerAction } from "../volumetracing_reducer"; +import { setLargestSegmentIdReducer } from "../volumetracing_reducer_helpers"; +import { + applyAddUserBoundingBox, + applyDeleteUserBoundingBox, + applyUpdateUserBoundingBox, +} from "./bounding_box"; + +export function applyVolumeUpdateActionsFromServer( + actions: ApplicableVolumeUpdateAction[], + state: WebknossosState, + VolumeTracingReducer: ( + state: WebknossosState, + action: VolumeTracingReducerAction, + ) => WebknossosState, +): WebknossosState { + let newState = state; + for (const ua of actions) { + newState = applySingleAction(ua, newState, VolumeTracingReducer); + } + + return newState; +} + +function applySingleAction( + ua: ApplicableVolumeUpdateAction, + state: WebknossosState, + VolumeTracingReducer: ( + state: WebknossosState, + action: VolumeTracingReducerAction, + ) => WebknossosState, +): WebknossosState { + switch (ua.name) { + case "updateLargestSegmentId": { + const volumeTracing = getVolumeTracingById(state.annotation, ua.value.actionTracingId); + return setLargestSegmentIdReducer(state, volumeTracing, ua.value.largestSegmentId); + } + case "createSegment": + case "updateSegment": { + const { 
actionTracingId, ...originalSegment } = ua.value; + const { anchorPosition, ...segmentWithoutAnchor } = originalSegment; + const segment: Partial = { + somePosition: anchorPosition ?? undefined, + ...segmentWithoutAnchor, + }; + return VolumeTracingReducer( + state, + updateSegmentAction(originalSegment.id, segment, actionTracingId), + ); + } + case "deleteSegment": { + return VolumeTracingReducer( + state, + removeSegmentAction(ua.value.id, ua.value.actionTracingId), + ); + } + case "updateSegmentGroups": { + return VolumeTracingReducer( + state, + setSegmentGroupsAction(ua.value.segmentGroups, ua.value.actionTracingId), + ); + } + case "updateUserBoundingBoxInVolumeTracing": { + return applyUpdateUserBoundingBox( + state, + getVolumeTracingById(state.annotation, ua.value.actionTracingId), + ua, + ); + } + case "addUserBoundingBoxInVolumeTracing": { + return applyAddUserBoundingBox( + state, + getVolumeTracingById(state.annotation, ua.value.actionTracingId), + ua, + ); + } + case "deleteUserBoundingBoxInVolumeTracing": { + return applyDeleteUserBoundingBox( + state, + getVolumeTracingById(state.annotation, ua.value.actionTracingId), + ua, + ); + } + case "updateSegmentGroupsExpandedState": + case "updateUserBoundingBoxVisibilityInVolumeTracing": { + // These update actions are user specific and don't need to be incorporated here + // because they are from another user. + return state; + } + default: { + ua satisfies never; + } + } + + ua satisfies never; + + // Satisfy TS. 
+ throw new Error("Reached unexpected part of function."); +} diff --git a/frontend/javascripts/viewer/model/reducers/volumetracing_reducer.ts b/frontend/javascripts/viewer/model/reducers/volumetracing_reducer.ts index c04efa56632..28ff50809ca 100644 --- a/frontend/javascripts/viewer/model/reducers/volumetracing_reducer.ts +++ b/frontend/javascripts/viewer/model/reducers/volumetracing_reducer.ts @@ -66,6 +66,7 @@ import { mapGroups, mapGroupsToGenerator } from "../accessors/skeletontracing_ac import type { TreeGroup } from "../types/tree_types"; import { sanitizeMetadata } from "./skeletontracing_reducer"; import { forEachGroups } from "./skeletontracing_reducer_helpers"; +import { applyVolumeUpdateActionsFromServer } from "./update_action_application/volume"; type SegmentUpdateInfo = | { @@ -322,7 +323,7 @@ export function serverVolumeToClientVolumeTracing( return volumeTracing; } -type VolumeTracingReducerAction = +export type VolumeTracingReducerAction = | VolumeTracingAction | SetMappingAction | FinishMappingInitializationAction @@ -436,6 +437,9 @@ function VolumeTracingReducer( volumes: { $set: newVolumes, }, + readOnly: { + $set: null, + }, }, }); @@ -676,6 +680,11 @@ function VolumeTracingReducer( }); } + case "APPLY_VOLUME_UPDATE_ACTIONS_FROM_SERVER": { + const { actions } = action; + return applyVolumeUpdateActionsFromServer(actions, state, VolumeTracingReducer); + } + default: return state; } diff --git a/frontend/javascripts/viewer/model/reducers/volumetracing_reducer_helpers.ts b/frontend/javascripts/viewer/model/reducers/volumetracing_reducer_helpers.ts index f79911522e6..1a8bce01e68 100644 --- a/frontend/javascripts/viewer/model/reducers/volumetracing_reducer_helpers.ts +++ b/frontend/javascripts/viewer/model/reducers/volumetracing_reducer_helpers.ts @@ -151,7 +151,7 @@ export function setContourTracingModeReducer( export function setLargestSegmentIdReducer( state: WebknossosState, volumeTracing: VolumeTracing, - id: number, + id: number | null, ) { 
return updateVolumeTracing(state, volumeTracing.tracingId, { largestSegmentId: id, diff --git a/frontend/javascripts/viewer/model/sagas/annotation_saga.tsx b/frontend/javascripts/viewer/model/sagas/annotation_saga.tsx index bf0be52f494..7964c164548 100644 --- a/frontend/javascripts/viewer/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/viewer/model/sagas/annotation_saga.tsx @@ -1,36 +1,19 @@ import type { EditableAnnotation } from "admin/rest_api"; -import { acquireAnnotationMutex, editAnnotation } from "admin/rest_api"; -import { Button } from "antd"; +import { editAnnotation } from "admin/rest_api"; import ErrorHandling from "libs/error_handling"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; import _ from "lodash"; import messages from "messages"; -import React from "react"; import type { ActionPattern } from "redux-saga/effects"; -import { - call, - cancel, - cancelled, - delay, - fork, - put, - retry, - take, - takeEvery, - takeLatest, -} from "typed-redux-saga"; -import type { APIUserCompact } from "types/api_types"; +import { call, delay, put, retry, take, takeLatest } from "typed-redux-saga"; import constants, { MappingStatusEnum } from "viewer/constants"; import { getMappingInfo, is2dDataset } from "viewer/model/accessors/dataset_accessor"; import { getActiveMagIndexForLayer } from "viewer/model/accessors/flycam_accessor"; import type { Action } from "viewer/model/actions/actions"; -import { - type EditAnnotationLayerAction, - type SetAnnotationDescriptionAction, - type SetOthersMayEditForAnnotationAction, - setAnnotationAllowUpdateAction, - setBlockedByUserAction, +import type { + EditAnnotationLayerAction, + SetAnnotationDescriptionAction, } from "viewer/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "viewer/model/actions/ui_actions"; import type { Saga } from "viewer/model/sagas/effect-generators"; @@ -38,7 +21,7 @@ import { select } from "viewer/model/sagas/effect-generators"; 
import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, -} from "viewer/model/sagas/save_saga_constants"; +} from "viewer/model/sagas/saving/save_saga_constants"; import { Model } from "viewer/singletons"; import Store from "viewer/store"; import { determineLayout } from "viewer/view/layouting/default_layout_configs"; @@ -48,7 +31,8 @@ import { mayEditAnnotationProperties } from "../accessors/annotation_accessor"; import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; import { pushSaveQueueTransaction } from "../actions/save_actions"; import { ensureWkReady } from "./ready_sagas"; -import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; +import { acquireAnnotationMutexMaybe } from "./saving/save_mutex_saga"; +import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./volume/update_actions"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. 
@@ -237,121 +221,6 @@ export function* watchAnnotationAsync(): Saga { yield* takeLatest("EDIT_ANNOTATION_LAYER", pushAnnotationLayerUpdateAsync); } -export function* acquireAnnotationMutexMaybe(): Saga { - yield* call(ensureWkReady); - const allowUpdate = yield* select((state) => state.annotation.restrictions.allowUpdate); - const annotationId = yield* select((storeState) => storeState.annotation.annotationId); - if (!allowUpdate) { - return; - } - const othersMayEdit = yield* select((state) => state.annotation.othersMayEdit); - const activeUser = yield* select((state) => state.activeUser); - const acquireMutexInterval = 1000 * 60; - const RETRY_COUNT = 12; - const MUTEX_NOT_ACQUIRED_KEY = "MutexCouldNotBeAcquired"; - const MUTEX_ACQUIRED_KEY = "AnnotationMutexAcquired"; - let isInitialRequest = true; - let doesHaveMutexFromBeginning = false; - let doesHaveMutex = false; - let shallTryAcquireMutex = othersMayEdit; - - function onMutexStateChanged(canEdit: boolean, blockedByUser: APIUserCompact | null | undefined) { - if (canEdit) { - Toast.close("MutexCouldNotBeAcquired"); - if (!isInitialRequest) { - const message = ( - - {messages["annotation.acquiringMutexSucceeded"]}{" "} - - - ); - Toast.success(message, { sticky: true, key: MUTEX_ACQUIRED_KEY }); - } - } else { - Toast.close(MUTEX_ACQUIRED_KEY); - const message = - blockedByUser != null - ? 
messages["annotation.acquiringMutexFailed"]({ - userName: `${blockedByUser.firstName} ${blockedByUser.lastName}`, - }) - : messages["annotation.acquiringMutexFailed.noUser"]; - Toast.warning(message, { sticky: true, key: MUTEX_NOT_ACQUIRED_KEY }); - } - } - - function* tryAcquireMutexContinuously(): Saga { - while (shallTryAcquireMutex) { - if (isInitialRequest) { - yield* put(setAnnotationAllowUpdateAction(false)); - } - try { - const { canEdit, blockedByUser } = yield* retry( - RETRY_COUNT, - acquireMutexInterval / RETRY_COUNT, - acquireAnnotationMutex, - annotationId, - ); - if (isInitialRequest && canEdit) { - doesHaveMutexFromBeginning = true; - // Only set allow update to true in case the first try to get the mutex succeeded. - yield* put(setAnnotationAllowUpdateAction(true)); - } - if (!canEdit || !doesHaveMutexFromBeginning) { - // If the mutex could not be acquired anymore or the user does not have it from the beginning, set allow update to false. - doesHaveMutexFromBeginning = false; - yield* put(setAnnotationAllowUpdateAction(false)); - } - if (canEdit) { - yield* put(setBlockedByUserAction(activeUser)); - } else { - yield* put(setBlockedByUserAction(blockedByUser)); - } - if (canEdit !== doesHaveMutex || isInitialRequest) { - doesHaveMutex = canEdit; - onMutexStateChanged(canEdit, blockedByUser); - } - } catch (error) { - if (process.env.IS_TESTING) { - // In unit tests, that explicitly control this generator function, - // the console.error after the next yield won't be printed, because - // test assertions on the yield will already throw. - // Therefore, we also print the error in the test context. 
- console.error("Error while trying to acquire mutex:", error); - } - const wasCanceled = yield* cancelled(); - if (!wasCanceled) { - console.error("Error while trying to acquire mutex.", error); - yield* put(setBlockedByUserAction(undefined)); - yield* put(setAnnotationAllowUpdateAction(false)); - doesHaveMutexFromBeginning = false; - if (doesHaveMutex || isInitialRequest) { - onMutexStateChanged(false, null); - doesHaveMutex = false; - } - } - } - isInitialRequest = false; - yield* call(delay, acquireMutexInterval); - } - } - let runningTryAcquireMutexContinuouslySaga = yield* fork(tryAcquireMutexContinuously); - function* reactToOthersMayEditChanges({ - othersMayEdit, - }: SetOthersMayEditForAnnotationAction): Saga { - shallTryAcquireMutex = othersMayEdit; - if (shallTryAcquireMutex) { - if (runningTryAcquireMutexContinuouslySaga != null) { - yield* cancel(runningTryAcquireMutexContinuouslySaga); - } - isInitialRequest = true; - runningTryAcquireMutexContinuouslySaga = yield* fork(tryAcquireMutexContinuously); - } else { - // othersMayEdit was turned off. The user editing it should be able to edit the annotation. 
- yield* put(setAnnotationAllowUpdateAction(true)); - } - } - yield* takeEvery("SET_OTHERS_MAY_EDIT_FOR_ANNOTATION", reactToOthersMayEditChanges); -} export default [ warnAboutSegmentationZoom, watchAnnotationAsync, diff --git a/frontend/javascripts/viewer/model/sagas/root_saga.ts b/frontend/javascripts/viewer/model/sagas/root_saga.ts index ca29fc70788..ab5b1e169e6 100644 --- a/frontend/javascripts/viewer/model/sagas/root_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/root_saga.ts @@ -8,15 +8,15 @@ import listenToClipHistogramSaga from "viewer/model/sagas/clip_histogram_saga"; import DatasetSagas from "viewer/model/sagas/dataset_saga"; import type { Saga } from "viewer/model/sagas/effect-generators"; import loadHistogramDataSaga from "viewer/model/sagas/load_histogram_data_saga"; -import MappingSaga from "viewer/model/sagas/mapping_saga"; import { watchDataRelevantChanges } from "viewer/model/sagas/prefetch_saga"; -import ProofreadSaga from "viewer/model/sagas/proofread_saga"; import ReadySagas from "viewer/model/sagas/ready_sagas"; -import SaveSagas, { toggleErrorHighlighting } from "viewer/model/sagas/save_saga"; +import SaveSagas from "viewer/model/sagas/saving/save_saga"; import SettingsSaga from "viewer/model/sagas/settings_saga"; import SkeletontracingSagas from "viewer/model/sagas/skeletontracing_saga"; import watchTasksAsync, { warnAboutMagRestriction } from "viewer/model/sagas/task_saga"; import UndoSaga from "viewer/model/sagas/undo_saga"; +import MappingSaga from "viewer/model/sagas/volume/mapping_saga"; +import ProofreadSaga from "viewer/model/sagas/volume/proofread_saga"; import VolumetracingSagas from "viewer/model/sagas/volumetracing_saga"; import type { EscalateErrorAction } from "../actions/actions"; import { setIsWkReadyAction } from "../actions/ui_actions"; @@ -24,6 +24,7 @@ import maintainMaximumZoomForAllMagsSaga from "./flycam_info_cache_saga"; import adHocMeshSaga from "./meshes/ad_hoc_mesh_saga"; import commonMeshSaga, { 
handleAdditionalCoordinateUpdate } from "./meshes/common_mesh_saga"; import precomputedMeshSaga from "./meshes/precomputed_mesh_saga"; +import { toggleErrorHighlighting } from "./saving/save_queue_draining"; import splitBoundaryMeshSaga from "./split_boundary_mesh_saga"; import { warnIfEmailIsUnverified } from "./user_saga"; diff --git a/frontend/javascripts/viewer/model/sagas/save_saga.ts b/frontend/javascripts/viewer/model/sagas/save_saga.ts deleted file mode 100644 index d8148a415c8..00000000000 --- a/frontend/javascripts/viewer/model/sagas/save_saga.ts +++ /dev/null @@ -1,624 +0,0 @@ -import { getNewestVersionForAnnotation, sendSaveRequestWithToken } from "admin/rest_api"; -import Date from "libs/date"; -import ErrorHandling from "libs/error_handling"; -import Toast from "libs/toast"; -import { sleep } from "libs/utils"; -import window, { alert, document, location } from "libs/window"; -import _ from "lodash"; -import memoizeOne from "memoize-one"; -import messages from "messages"; -import { buffers } from "redux-saga"; -import { actionChannel, call, delay, fork, put, race, take, takeEvery } from "typed-redux-saga"; -import { ControlModeEnum } from "viewer/constants"; -import { getMagInfo } from "viewer/model/accessors/dataset_accessor"; -import { selectTracing } from "viewer/model/accessors/tracing_accessor"; -import { FlycamActions } from "viewer/model/actions/flycam_actions"; -import { - type EnsureTracingsWereDiffedToSaveQueueAction, - pushSaveQueueTransaction, - setLastSaveTimestampAction, - setSaveBusyAction, - setVersionNumberAction, - shiftSaveQueueAction, -} from "viewer/model/actions/save_actions"; -import type { InitializeSkeletonTracingAction } from "viewer/model/actions/skeletontracing_actions"; -import { SkeletonTracingSaveRelevantActions } from "viewer/model/actions/skeletontracing_actions"; -import { ViewModeSaveRelevantActions } from "viewer/model/actions/view_mode_actions"; -import { - type InitializeVolumeTracingAction, - 
VolumeTracingSaveRelevantActions, -} from "viewer/model/actions/volumetracing_actions"; -import compactSaveQueue from "viewer/model/helpers/compaction/compact_save_queue"; -import compactUpdateActions from "viewer/model/helpers/compaction/compact_update_actions"; -import { globalPositionToBucketPosition } from "viewer/model/helpers/position_converter"; -import type { Saga } from "viewer/model/sagas/effect-generators"; -import { select } from "viewer/model/sagas/effect-generators"; -import { ensureWkReady } from "viewer/model/sagas/ready_sagas"; -import { - MAXIMUM_ACTION_COUNT_PER_SAVE, - MAX_SAVE_RETRY_WAITING_TIME, - PUSH_THROTTLE_TIME, - SAVE_RETRY_WAITING_TIME, -} from "viewer/model/sagas/save_saga_constants"; -import { diffSkeletonTracing } from "viewer/model/sagas/skeletontracing_saga"; -import { - type UpdateActionWithoutIsolationRequirement, - updateCameraAnnotation, - updateTdCamera, -} from "viewer/model/sagas/update_actions"; -import { diffVolumeTracing } from "viewer/model/sagas/volumetracing_saga"; -import { Model } from "viewer/singletons"; -import type { - CameraData, - Flycam, - SaveQueueEntry, - SkeletonTracing, - VolumeTracing, -} from "viewer/store"; -import { getFlooredPosition, getRotation } from "../accessors/flycam_accessor"; -import type { Action } from "../actions/actions"; -import type { BatchedAnnotationInitializationAction } from "../actions/annotation_actions"; -import { takeEveryWithBatchActionSupport } from "./saga_helpers"; - -const ONE_YEAR_MS = 365 * 24 * 3600 * 1000; - -export function* pushSaveQueueAsync(): Saga { - yield* call(ensureWkReady); - - yield* put(setLastSaveTimestampAction()); - let loopCounter = 0; - - while (true) { - loopCounter++; - let saveQueue; - // Check whether the save queue is actually empty, the PUSH_SAVE_QUEUE_TRANSACTION action - // could have been triggered during the call to sendSaveRequestToServer - saveQueue = yield* select((state) => state.save.queue); - - if (saveQueue.length === 0) { - if 
(loopCounter % 100 === 0) { - // See https://github.com/scalableminds/webknossos/pull/6076 (or 82e16e1) for an explanation - // of this delay call. - yield* delay(0); - } - - // Save queue is empty, wait for push event - yield* take("PUSH_SAVE_QUEUE_TRANSACTION"); - } - - const { forcePush } = yield* race({ - timeout: delay(PUSH_THROTTLE_TIME), - forcePush: take("SAVE_NOW"), - }); - yield* put(setSaveBusyAction(true)); - - // Send (parts of) the save queue to the server. - // There are two main cases: - // 1) forcePush is true - // The user explicitly requested to save an annotation. - // In this case, batches are sent to the server until the save - // queue is empty. Note that the save queue might be added to - // while saving is in progress. Still, the save queue will be - // drained until it is empty. If the user hits save and continuously - // annotates further, a high number of save-requests might be sent. - // 2) forcePush is false - // The auto-save interval was reached at time T. The following code - // will determine how many items are in the save queue at this time T. - // Exactly that many items will be sent to the server. - // New items that might be added to the save queue during saving, will be - // ignored (they will be picked up in the next iteration of this loop). - // Otherwise, the risk of a high number of save-requests (see case 1) - // would be present here, too (note the risk would be greater, because the - // user didn't use the save button which is usually accompanied by a small pause). - const itemCountToSave = forcePush - ? 
Number.POSITIVE_INFINITY - : yield* select((state) => state.save.queue.length); - let savedItemCount = 0; - while (savedItemCount < itemCountToSave) { - saveQueue = yield* select((state) => state.save.queue); - - if (saveQueue.length > 0) { - savedItemCount += yield* call(sendSaveRequestToServer); - } else { - break; - } - } - yield* put(setSaveBusyAction(false)); - } -} - -// This function returns the first n batches of the provided array, so that the count of -// all actions in these n batches does not exceed MAXIMUM_ACTION_COUNT_PER_SAVE -function sliceAppropriateBatchCount(batches: Array): Array { - const slicedBatches = []; - let actionCount = 0; - - for (const batch of batches) { - const newActionCount = actionCount + batch.actions.length; - - if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE) { - actionCount = newActionCount; - slicedBatches.push(batch); - } else { - break; - } - } - - return slicedBatches; -} - -function getRetryWaitTime(retryCount: number) { - // Exponential backoff up until MAX_SAVE_RETRY_WAITING_TIME - return Math.min(2 ** retryCount * SAVE_RETRY_WAITING_TIME, MAX_SAVE_RETRY_WAITING_TIME); -} - -// The value for this boolean does not need to be restored to false -// at any time, because the browser page is reloaded after the message is shown, anyway. -let didShowFailedSimultaneousTracingError = false; - -export function* sendSaveRequestToServer(): Saga { - /* - * Saves a reasonably-sized part of the save queue to the server (plus retry-mechanism). - * The saga returns the number of save queue items that were saved. 
- */ - - const fullSaveQueue = yield* select((state) => state.save.queue); - const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); - let compactedSaveQueue = compactSaveQueue(saveQueue); - const version = yield* select((state) => state.annotation.version); - const annotationId = yield* select((state) => state.annotation.annotationId); - const tracingStoreUrl = yield* select((state) => state.annotation.tracingStore.url); - let versionIncrement; - [compactedSaveQueue, versionIncrement] = addVersionNumbers(compactedSaveQueue, version); - let retryCount = 0; - - // This while-loop only exists for the purpose of a retry-mechanism - while (true) { - let exceptionDuringMarkBucketsAsNotDirty = false; - - try { - const startTime = Date.now(); - yield* call( - sendSaveRequestWithToken, - `${tracingStoreUrl}/tracings/annotation/${annotationId}/update?token=`, - { - method: "POST", - data: compactedSaveQueue, - compress: process.env.NODE_ENV === "production", - // Suppressing error toast, as the doWithToken retry with personal token functionality should not show an error. - // Instead the error is logged and toggleErrorHighlighting should take care of showing an error to the user. - showErrorToast: false, - }, - ); - const endTime = Date.now(); - - if (endTime - startTime > PUSH_THROTTLE_TIME) { - yield* call( - [ErrorHandling, ErrorHandling.notify], - new Error( - `Warning: Save request took more than ${Math.ceil(PUSH_THROTTLE_TIME / 1000)} seconds.`, - ), - ); - } - - yield* put(setVersionNumberAction(version + versionIncrement)); - yield* put(setLastSaveTimestampAction()); - yield* put(shiftSaveQueueAction(saveQueue.length)); - - try { - yield* call(markBucketsAsNotDirty, compactedSaveQueue); - } catch (error) { - // If markBucketsAsNotDirty fails some reason, wk cannot recover from this error. - console.warn("Error when marking buckets as clean. No retry possible. 
Error:", error); - exceptionDuringMarkBucketsAsNotDirty = true; - throw error; - } - - yield* call(toggleErrorHighlighting, false); - return saveQueue.length; - } catch (error) { - if (exceptionDuringMarkBucketsAsNotDirty) { - throw error; - } - - console.warn("Error during saving. Will retry. Error:", error); - const controlMode = yield* select((state) => state.temporaryConfiguration.controlMode); - const isViewOrSandboxMode = - controlMode === ControlModeEnum.VIEW || controlMode === ControlModeEnum.SANDBOX; - - if (!isViewOrSandboxMode) { - // Notify user about error unless, view or sandbox mode is active. In that case, - // we do not need to show the error as it is not so important and distracts the user. - yield* call(toggleErrorHighlighting, true); - } - - // Log the error to airbrake. Also compactedSaveQueue needs to be within an object - // as otherwise the entries would be spread by the notify function. - // @ts-ignore - yield* call({ context: ErrorHandling, fn: ErrorHandling.notify }, error, { - compactedSaveQueue, - retryCount, - }); - - // @ts-ignore - if (error.status === 409) { - // HTTP Code 409 'conflict' for dirty state - // @ts-ignore - window.onbeforeunload = null; - yield* call( - [ErrorHandling, ErrorHandling.notify], - new Error("Saving failed due to '409' status code"), - ); - if (!didShowFailedSimultaneousTracingError) { - // If the saving fails for one tracing (e.g., skeleton), it can also - // fail for another tracing (e.g., volume). The message simply tells the - // user that the saving in general failed. So, there is no sense in showing - // the message multiple times. - yield* call(alert, messages["save.failed_simultaneous_tracing"]); - location.reload(); - didShowFailedSimultaneousTracingError = true; - } - - // Wait "forever" to avoid that the caller initiates other save calls afterwards (e.g., - // can happen if the caller tries to force-flush the save queue). 
- // The reason we don't throw an error immediately is that this would immediately - // crash all sagas (including saving other tracings). - yield* call(sleep, ONE_YEAR_MS); - throw new Error("Saving failed due to conflict."); - } - - yield* race({ - timeout: delay(getRetryWaitTime(retryCount)), - forcePush: take("SAVE_NOW"), - }); - retryCount++; - } - } -} - -function* markBucketsAsNotDirty(saveQueue: Array) { - const getLayerAndMagInfoForTracingId = memoizeOne((tracingId: string) => { - const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); - const segmentationMagInfo = getMagInfo(segmentationLayer.mags); - return [segmentationLayer, segmentationMagInfo] as const; - }); - for (const saveEntry of saveQueue) { - for (const updateAction of saveEntry.actions) { - if (updateAction.name === "updateBucket") { - const { actionTracingId: tracingId } = updateAction.value; - const [segmentationLayer, segmentationMagInfo] = getLayerAndMagInfoForTracingId(tracingId); - - const { position, mag, additionalCoordinates } = updateAction.value; - const magIndex = segmentationMagInfo.getIndexByMag(mag); - const zoomedBucketAddress = globalPositionToBucketPosition( - position, - segmentationMagInfo.getDenseMags(), - magIndex, - additionalCoordinates, - ); - const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); - - if (bucket.type === "null") { - continue; - } - - bucket.dirtyCount--; - - if (bucket.dirtyCount === 0) { - bucket.markAsPushed(); - } - } - } - } -} - -export function toggleErrorHighlighting(state: boolean, permanentError: boolean = false): void { - if (document.body != null) { - document.body.classList.toggle("save-error", state); - } - - const message = permanentError ? 
messages["save.failed.permanent"] : messages["save.failed"]; - - if (state) { - Toast.error(message, { - sticky: true, - }); - } else { - Toast.close(message); - } -} -export function addVersionNumbers( - updateActionsBatches: Array, - lastVersion: number, -): [Array, number] { - let versionIncrement = 0; - const batchesWithVersions = updateActionsBatches.map((batch) => { - if (batch.transactionGroupIndex === 0) { - versionIncrement++; - } - return { ...batch, version: lastVersion + versionIncrement }; - }); - return [batchesWithVersions, versionIncrement]; -} -export function performDiffTracing( - prevTracing: SkeletonTracing | VolumeTracing, - tracing: SkeletonTracing | VolumeTracing, -): Array { - let actions: Array = []; - - if (prevTracing.type === "skeleton" && tracing.type === "skeleton") { - actions = actions.concat(Array.from(diffSkeletonTracing(prevTracing, tracing))); - } - - if (prevTracing.type === "volume" && tracing.type === "volume") { - actions = actions.concat(Array.from(diffVolumeTracing(prevTracing, tracing))); - } - - return actions; -} - -export function performDiffAnnotation( - prevFlycam: Flycam, - flycam: Flycam, - prevTdCamera: CameraData, - tdCamera: CameraData, -): Array { - let actions: Array = []; - - if (prevFlycam !== flycam) { - actions = actions.concat( - updateCameraAnnotation( - getFlooredPosition(flycam), - flycam.additionalCoordinates, - getRotation(flycam), - flycam.zoomStep, - ), - ); - } - - if (prevTdCamera !== tdCamera) { - actions = actions.concat(updateTdCamera()); - } - - return actions; -} - -export function* saveTracingAsync(): Saga { - yield* fork(pushSaveQueueAsync); - yield* takeEvery("INITIALIZE_ANNOTATION_WITH_TRACINGS", setupSavingForAnnotation); - yield* takeEveryWithBatchActionSupport("INITIALIZE_SKELETONTRACING", setupSavingForTracingType); - yield* takeEveryWithBatchActionSupport("INITIALIZE_VOLUMETRACING", setupSavingForTracingType); -} - -function* setupSavingForAnnotation(_action: 
BatchedAnnotationInitializationAction): Saga { - yield* call(ensureWkReady); - - while (true) { - let prevFlycam = yield* select((state) => state.flycam); - let prevTdCamera = yield* select((state) => state.viewModeData.plane.tdCamera); - yield* take([ - ...FlycamActions, - ...ViewModeSaveRelevantActions, - ...SkeletonTracingSaveRelevantActions, - ]); - // The allowUpdate setting could have changed in the meantime - const allowUpdate = yield* select( - (state) => - state.annotation.restrictions.allowUpdate && state.annotation.restrictions.allowSave, - ); - if (!allowUpdate) continue; - const flycam = yield* select((state) => state.flycam); - const tdCamera = yield* select((state) => state.viewModeData.plane.tdCamera); - - const items = Array.from( - yield* call(performDiffAnnotation, prevFlycam, flycam, prevTdCamera, tdCamera), - ); - - if (items.length > 0) { - yield* put(pushSaveQueueTransaction(items)); - } - - prevFlycam = flycam; - prevTdCamera = tdCamera; - } -} - -export function* setupSavingForTracingType( - initializeAction: InitializeSkeletonTracingAction | InitializeVolumeTracingAction, -): Saga { - /* - Listen to changes to the annotation and derive UpdateActions from the - old and new state. - The actual push to the server is done by the forked pushSaveQueueAsync saga. - */ - const tracingType = - initializeAction.type === "INITIALIZE_SKELETONTRACING" ? "skeleton" : "volume"; - const tracingId = initializeAction.tracing.id; - let prevTracing = (yield* select((state) => selectTracing(state, tracingType, tracingId))) as - | VolumeTracing - | SkeletonTracing; - - yield* call(ensureWkReady); - - const actionBuffer = buffers.expanding(); - const tracingActionChannel = yield* actionChannel( - tracingType === "skeleton" - ? 
[ - ...SkeletonTracingSaveRelevantActions, - // SET_SKELETON_TRACING is not included in SkeletonTracingSaveRelevantActions, because it is used by Undo/Redo and - // should not create its own Undo/Redo stack entry - "SET_SKELETON_TRACING", - ] - : VolumeTracingSaveRelevantActions, - actionBuffer, - ); - - // See Model.ensureSavedState for an explanation of this action channel. - const ensureDiffedChannel = yield* actionChannel( - "ENSURE_TRACINGS_WERE_DIFFED_TO_SAVE_QUEUE", - ); - - while (true) { - // Prioritize consumption of tracingActionChannel since we don't want to - // reply to the ENSURE_TRACINGS_WERE_DIFFED_TO_SAVE_QUEUE action if there - // are unprocessed user actions. - if (!actionBuffer.isEmpty()) { - yield* take(tracingActionChannel); - } else { - // Wait for either a user action or the "ensureAction". - const { ensureAction } = yield* race({ - _tracingAction: take(tracingActionChannel), - ensureAction: take(ensureDiffedChannel), - }); - if (ensureAction != null) { - ensureAction.callback(tracingId); - continue; - } - } - - // The allowUpdate setting could have changed in the meantime - const allowUpdate = yield* select( - (state) => - state.annotation.restrictions.allowUpdate && state.annotation.restrictions.allowSave, - ); - if (!allowUpdate) continue; - const tracing = (yield* select((state) => selectTracing(state, tracingType, tracingId))) as - | VolumeTracing - | SkeletonTracing; - - const items = compactUpdateActions( - Array.from(yield* call(performDiffTracing, prevTracing, tracing)), - tracing, - ); - - if (items.length > 0) { - yield* put(pushSaveQueueTransaction(items)); - } - - prevTracing = tracing; - } -} - -const VERSION_POLL_INTERVAL_COLLAB = 10 * 1000; -const VERSION_POLL_INTERVAL_READ_ONLY = 60 * 1000; -const VERSION_POLL_INTERVAL_SINGLE_EDITOR = 30 * 1000; - -function* watchForSaveConflicts(): Saga { - function* checkForNewVersion() { - const allowSave = yield* select( - (state) => - state.annotation.restrictions.allowSave && 
state.annotation.restrictions.allowUpdate, - ); - if (allowSave) { - // The active user is currently the only one that is allowed to mutate the annotation. - // Since we only acquire the mutex upon page load, there shouldn't be any unseen updates - // between the page load and this check here. - // A race condition where - // 1) another user saves version X - // 2) we load the annotation but only get see version X - 1 (this is the race) - // 3) we acquire a mutex - // should not occur, because there is a grace period for which the mutex has to be free until it can - // be acquired again (see annotation.mutex.expiryTime in application.conf). - // The downside of an early return here is that we won't be able to warn the user early - // if the user opened the annotation in two tabs and mutated it there. - // However, - // a) this scenario is pretty rare and the worst case is that they get a 409 error - // during saving and - // b) checking for newer versions when the active user may update the annotation introduces - // a race condition between this saga and the actual save saga. Synchronizing these sagas - // would be possible, but would add further complexity to the mission critical save saga. - return; - } - - const maybeSkeletonTracing = yield* select((state) => state.annotation.skeleton); - const volumeTracings = yield* select((state) => state.annotation.volumes); - const tracingStoreUrl = yield* select((state) => state.annotation.tracingStore.url); - const annotationId = yield* select((state) => state.annotation.annotationId); - - const tracings: Array = _.compact([ - ...volumeTracings, - maybeSkeletonTracing, - ]); - - if (tracings.length === 0) { - return; - } - - const versionOnServer = yield* call( - getNewestVersionForAnnotation, - tracingStoreUrl, - annotationId, - ); - - // Read the tracing version again from the store, since the - // old reference to tracing might be outdated now due to the - // immutability. 
- const versionOnClient = yield* select((state) => { - return state.annotation.version; - }); - - const toastKey = "save_conflicts_warning"; - if (versionOnServer > versionOnClient) { - // The latest version on the server is greater than the most-recently - // stored version. - - const saveQueue = yield* select((state) => state.save.queue); - - let msg = ""; - if (!allowSave) { - msg = - "A newer version of this annotation was found on the server. Reload the page to see the newest changes."; - } else if (saveQueue.length > 0) { - msg = - "A newer version of this annotation was found on the server. Your current changes to this annotation cannot be saved anymore."; - } else { - msg = - "A newer version of this annotation was found on the server. Please reload the page to see the newer version. Otherwise, changes to the annotation cannot be saved anymore."; - } - Toast.warning(msg, { - sticky: true, - key: toastKey, - }); - } else { - Toast.close(toastKey); - } - } - - function* getPollInterval(): Saga { - const allowSave = yield* select((state) => state.annotation.restrictions.allowSave); - if (!allowSave) { - // The current user may not edit/save the annotation. - return VERSION_POLL_INTERVAL_READ_ONLY; - } - - const othersMayEdit = yield* select((state) => state.annotation.othersMayEdit); - if (othersMayEdit) { - // Other users may edit the annotation. - return VERSION_POLL_INTERVAL_COLLAB; - } - - // The current user is the only one who can edit the annotation. - return VERSION_POLL_INTERVAL_SINGLE_EDITOR; - } - - yield* call(ensureWkReady); - - while (true) { - const interval = yield* call(getPollInterval); - yield* call(sleep, interval); - if (yield* select((state) => state.uiInformation.showVersionRestore)) { - continue; - } - try { - yield* call(checkForNewVersion); - } catch (exception) { - // If the version check fails for some reason, we don't want to crash the entire - // saga. 
- console.warn(exception); - // @ts-ignore - ErrorHandling.notify(exception); - } - } -} - -export default [saveTracingAsync, watchForSaveConflicts]; diff --git a/frontend/javascripts/viewer/model/sagas/saving/save_mutex_saga.tsx b/frontend/javascripts/viewer/model/sagas/saving/save_mutex_saga.tsx new file mode 100644 index 00000000000..334cf2e1d60 --- /dev/null +++ b/frontend/javascripts/viewer/model/sagas/saving/save_mutex_saga.tsx @@ -0,0 +1,227 @@ +import { acquireAnnotationMutex } from "admin/rest_api"; +import { Button } from "antd"; +import Toast from "libs/toast"; +import messages from "messages"; +import React from "react"; +import { + call, + cancel, + cancelled, + delay, + type FixedTask, + fork, + put, + race, + retry, + take, + takeEvery, +} from "typed-redux-saga"; +import { + type SetIsMutexAcquiredAction, + type SetOthersMayEditForAnnotationAction, + setAnnotationAllowUpdateAction, + setBlockedByUserAction, + setIsMutexAcquiredAction, +} from "viewer/model/actions/annotation_actions"; +import type { Saga } from "viewer/model/sagas/effect-generators"; +import { select } from "viewer/model/sagas/effect-generators"; +import { ensureWkReady } from "../ready_sagas"; +import { EnsureMaySaveNowAction } from "viewer/model/actions/save_actions"; + +// Also refer to application.conf where annotation.mutex.expiryTime is defined +// (typically, 2 minutes). 
+ +const MUTEX_NOT_ACQUIRED_KEY = "MutexCouldNotBeAcquired"; +const MUTEX_ACQUIRED_KEY = "AnnotationMutexAcquired"; +const ACQUIRE_MUTEX_INTERVAL = 1000 * 60; +const RETRY_COUNT = 12; // 12 retries with 60/12=5 seconds backoff delay + +// todop +const DISABLE_EAGER_MUTEX_ACQUISITION = true; + +type MutexLogicState = { + isInitialRequest: boolean; +}; + +function* getDoesHaveMutex(): Saga { + return yield* select((state) => state.annotation.isMutexAcquired); +} + +export function* acquireAnnotationMutexMaybe(): Saga { + yield* call(ensureWkReady); + const initialAllowUpdate = yield* select( + (state) => state.annotation.restrictions.initialAllowUpdate, + ); + if (!initialAllowUpdate) { + // We are in a read-only annotation. There's no point in acquiring mutexes. + console.log("exit mutex saga"); + return; + } + const mutexLogicState: MutexLogicState = { + isInitialRequest: true, + }; + + yield* fork(watchMutexStateChangesForNotification, mutexLogicState); + + let runningTryAcquireMutexContinuouslySaga: FixedTask | null; + + function* reactToOthersMayEditChanges({ + othersMayEdit, + }: SetOthersMayEditForAnnotationAction): Saga { + if (othersMayEdit) { + if (runningTryAcquireMutexContinuouslySaga != null) { + yield* cancel(runningTryAcquireMutexContinuouslySaga); + } + runningTryAcquireMutexContinuouslySaga = yield* fork( + tryAcquireMutexContinuously, + mutexLogicState, + ); + } else { + // othersMayEdit was turned off by the activeUser. Since this is only + // allowed by the owner, they should be able to edit the annotation, too. + // Still, let's check that owner === activeUser to be extra safe.
+ const owner = yield* select((storeState) => storeState.annotation.owner); + const activeUser = yield* select((state) => state.activeUser); + if (activeUser && owner?.id === activeUser?.id) { + yield* put(setAnnotationAllowUpdateAction(true)); + } + } + } + yield* takeEvery("SET_OTHERS_MAY_EDIT_FOR_ANNOTATION", reactToOthersMayEditChanges); + + if (DISABLE_EAGER_MUTEX_ACQUISITION) { + console.log("listening to all ENSURE_MAY_SAVE_NOW"); + yield* takeEvery("ENSURE_MAY_SAVE_NOW", resolveEnsureMaySaveNowActions); + while (true) { + console.log("taking ENSURE_MAY_SAVE_NOW"); + yield* take("ENSURE_MAY_SAVE_NOW"); + console.log("took ENSURE_MAY_SAVE_NOW"); + const { doneSaving } = yield race({ + tryAcquireMutexContinuously: fork(tryAcquireMutexContinuously, mutexLogicState), + doneSaving: take("DONE_SAVING"), + }); + if (doneSaving) { + yield call(releaseMutex); + } + } + } else { + runningTryAcquireMutexContinuouslySaga = yield* fork( + tryAcquireMutexContinuously, + mutexLogicState, + ); + } +} + +function* resolveEnsureMaySaveNowActions(action: EnsureMaySaveNowAction) { + /* + * For each EnsureMaySaveNowAction wait until, we have the mutex. Then call + * the callback. + */ + while (true) { + const doesHaveMutex = yield* select(getDoesHaveMutex); + if (doesHaveMutex) { + action.callback(); + return; + } + yield* take("SET_BLOCKED_BY_USER"); + } +} + +function* tryAcquireMutexContinuously(mutexLogicState: MutexLogicState): Saga { + console.log("started tryAcquireMutexContinuously"); + const annotationId = yield* select((storeState) => storeState.annotation.annotationId); + const activeUser = yield* select((state) => state.activeUser); + mutexLogicState.isInitialRequest = true; + + // We can simply use an infinite loop here, because the saga will be cancelled by + // reactToOthersMayEditChanges when othersMayEdit is set to false. 
+ while (true) { + console.log("tryAcquireMutexContinuously loop"); + const blockedByUser = yield* select((state) => state.annotation.blockedByUser); + if (blockedByUser == null || blockedByUser.id !== activeUser?.id) { + // If the annotation is currently not blocked by the active user, + // we immediately disallow updating the annotation. + yield* put(setAnnotationAllowUpdateAction(false)); + } + try { + const { canEdit, blockedByUser } = yield* retry( + RETRY_COUNT, + ACQUIRE_MUTEX_INTERVAL / RETRY_COUNT, + acquireAnnotationMutex, + annotationId, + ); + yield* put(setAnnotationAllowUpdateAction(canEdit)); + yield* put(setBlockedByUserAction(canEdit ? activeUser : blockedByUser)); + + if (canEdit !== (yield* call(getDoesHaveMutex))) { + // Only dispatch the action if it changes the store to avoid + // unnecessary notifications. + yield* put(setIsMutexAcquiredAction(canEdit)); + } + } catch (error) { + if (process.env.IS_TESTING) { + // In unit tests, that explicitly control this generator function, + // the console.error after the next yield won't be printed, because + // test assertions on the yield will already throw. + // Therefore, we also print the error in the test context. + console.error("Error while trying to acquire mutex:", error); + } + // todop: I think this needs to happen in a finally block? 
+ const wasCanceled = yield* cancelled(); + console.log("wasCanceled", wasCanceled); + if (!wasCanceled) { + console.error("Error while trying to acquire mutex.", error); + yield* put(setBlockedByUserAction(undefined)); + yield* put(setAnnotationAllowUpdateAction(false)); + if (yield* call(getDoesHaveMutex)) { + yield* put(setIsMutexAcquiredAction(false)); + } + } + } + mutexLogicState.isInitialRequest = false; + yield* call(delay, ACQUIRE_MUTEX_INTERVAL); + } +} + +function* watchMutexStateChangesForNotification(mutexLogicState: MutexLogicState): Saga { + yield* takeEvery( + "SET_IS_MUTEX_ACQUIRED", + function* ({ isMutexAcquired }: SetIsMutexAcquiredAction) { + if (isMutexAcquired) { + Toast.close(MUTEX_NOT_ACQUIRED_KEY); + if (!mutexLogicState.isInitialRequest) { + const message = ( + + {messages["annotation.acquiringMutexSucceeded"]}" " + + + ); + Toast.success(message, { sticky: true, key: MUTEX_ACQUIRED_KEY }); + } + } else { + Toast.close(MUTEX_ACQUIRED_KEY); + const blockedByUser = yield* select((state) => state.annotation.blockedByUser); + const message = + blockedByUser != null + ? 
messages["annotation.acquiringMutexFailed"]({ + userName: `${blockedByUser.firstName} ${blockedByUser.lastName}`, + }) + : messages["annotation.acquiringMutexFailed.noUser"]; + Toast.warning(message, { sticky: true, key: MUTEX_NOT_ACQUIRED_KEY }); + } + mutexLogicState.isInitialRequest = false; + }, + ); +} + +function* releaseMutex() { + const annotationId = yield* select((storeState) => storeState.annotation.annotationId); + yield* retry( + RETRY_COUNT, + ACQUIRE_MUTEX_INTERVAL / RETRY_COUNT, + acquireAnnotationMutex, + annotationId, + ); + yield* put(setAnnotationAllowUpdateAction(true)); + yield* put(setBlockedByUserAction(null)); +} diff --git a/frontend/javascripts/viewer/model/sagas/saving/save_queue_draining.ts b/frontend/javascripts/viewer/model/sagas/saving/save_queue_draining.ts new file mode 100644 index 00000000000..84fb7756e81 --- /dev/null +++ b/frontend/javascripts/viewer/model/sagas/saving/save_queue_draining.ts @@ -0,0 +1,305 @@ +// /* +// * This module contains the sagas responsible for sending the contents of the save queue +// * to the back-end (thus, draining the queue). 
+// */ + +import { sendSaveRequestWithToken } from "admin/rest_api"; +import Date from "libs/date"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import window, { alert, document, location } from "libs/window"; +import memoizeOne from "memoize-one"; +import messages from "messages"; +import { call, delay, put, race, take } from "typed-redux-saga"; +import { ControlModeEnum } from "viewer/constants"; +import { getMagInfo } from "viewer/model/accessors/dataset_accessor"; +import { + dispatchEnsureMaySaveNowAsync, + doneSavingAction, + setLastSaveTimestampAction, + setSaveBusyAction, + setVersionNumberAction, + shiftSaveQueueAction, +} from "viewer/model/actions/save_actions"; +import compactSaveQueue from "viewer/model/helpers/compaction/compact_save_queue"; +import { globalPositionToBucketPosition } from "viewer/model/helpers/position_converter"; +import type { Saga } from "viewer/model/sagas/effect-generators"; +import { select } from "viewer/model/sagas/effect-generators"; +import { ensureWkReady } from "viewer/model/sagas/ready_sagas"; +import { + MAXIMUM_ACTION_COUNT_PER_SAVE, + MAX_SAVE_RETRY_WAITING_TIME, + PUSH_THROTTLE_TIME, + SAVE_RETRY_WAITING_TIME, +} from "viewer/model/sagas/saving/save_saga_constants"; +import { Model, Store } from "viewer/singletons"; +import type { SaveQueueEntry } from "viewer/store"; + +export function* pushSaveQueueAsync(): Saga { + yield* call(ensureWkReady); + + yield* put(setLastSaveTimestampAction()); + let loopCounter = 0; + + while (true) { + loopCounter++; + let saveQueue; + // Check whether the save queue is actually empty, the PUSH_SAVE_QUEUE_TRANSACTION action + // could have been triggered during the call to sendSaveRequestToServer + saveQueue = yield* select((state) => state.save.queue); + + if (saveQueue.length === 0) { + if (loopCounter % 100 === 0) { + // See https://github.com/scalableminds/webknossos/pull/6076 (or 82e16e1) for an explanation + // of this delay call. 
+ yield* delay(0); + } + + // Save queue is empty, wait for push event + yield* take("PUSH_SAVE_QUEUE_TRANSACTION"); + } + + const { forcePush } = yield* race({ + timeout: delay(PUSH_THROTTLE_TIME), + forcePush: take("SAVE_NOW"), + }); + yield* put(setSaveBusyAction(true)); + + // Wait until we may save + yield* call(dispatchEnsureMaySaveNowAsync, Store.dispatch); + + // Send (parts of) the save queue to the server. + // There are two main cases: + // 1) forcePush is true + // The user explicitly requested to save an annotation. + // In this case, batches are sent to the server until the save + // queue is empty. Note that the save queue might be added to + // while saving is in progress. Still, the save queue will be + // drained until it is empty. If the user hits save and continuously + // annotates further, a high number of save-requests might be sent. + // 2) forcePush is false + // The auto-save interval was reached at time T. The following code + // will determine how many items are in the save queue at this time T. + // Exactly that many items will be sent to the server. + // New items that might be added to the save queue during saving, will be + // ignored (they will be picked up in the next iteration of this loop). + // Otherwise, the risk of a high number of save-requests (see case 1) + // would be present here, too (note the risk would be greater, because the + // user didn't use the save button which is usually accompanied by a small pause). + const itemCountToSave = forcePush + ? 
Number.POSITIVE_INFINITY + : yield* select((state) => state.save.queue.length); + let savedItemCount = 0; + while (savedItemCount < itemCountToSave) { + saveQueue = yield* select((state) => state.save.queue); + + if (saveQueue.length > 0) { + savedItemCount += yield* call(sendSaveRequestToServer); + } else { + break; + } + } + yield* put(doneSavingAction()); + yield* put(setSaveBusyAction(false)); + } +} + +function getRetryWaitTime(retryCount: number) { + // Exponential backoff up until MAX_SAVE_RETRY_WAITING_TIME + return Math.min(2 ** retryCount * SAVE_RETRY_WAITING_TIME, MAX_SAVE_RETRY_WAITING_TIME); +} + +export function* sendSaveRequestToServer(): Saga { + /* + * Saves a reasonably-sized part of the save queue to the server (plus retry-mechanism). + * The saga returns the number of save queue items that were saved. + */ + + const fullSaveQueue = yield* select((state) => state.save.queue); + const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); + let compactedSaveQueue = compactSaveQueue(saveQueue); + const version = yield* select((state) => state.annotation.version); + const annotationId = yield* select((state) => state.annotation.annotationId); + const tracingStoreUrl = yield* select((state) => state.annotation.tracingStore.url); + let versionIncrement; + [compactedSaveQueue, versionIncrement] = addVersionNumbers(compactedSaveQueue, version); + let retryCount = 0; + + // This while-loop only exists for the purpose of a retry-mechanism + while (true) { + let exceptionDuringMarkBucketsAsNotDirty = false; + + try { + const startTime = Date.now(); + yield* call( + sendSaveRequestWithToken, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: compactedSaveQueue, + compress: process.env.NODE_ENV === "production", + // Suppressing error toast, as the doWithToken retry with personal token functionality should not show an error. 
+ // Instead the error is logged and toggleErrorHighlighting should take care of showing an error to the user. + showErrorToast: false, + }, + ); + const endTime = Date.now(); + + if (endTime - startTime > PUSH_THROTTLE_TIME) { + yield* call( + [ErrorHandling, ErrorHandling.notify], + new Error( + `Warning: Save request took more than ${Math.ceil(PUSH_THROTTLE_TIME / 1000)} seconds.`, + ), + ); + } + + yield* put(setVersionNumberAction(version + versionIncrement)); + yield* put(setLastSaveTimestampAction()); + yield* put(shiftSaveQueueAction(saveQueue.length)); + + try { + yield* call(markBucketsAsNotDirty, compactedSaveQueue); + } catch (error) { + // If markBucketsAsNotDirty fails for some reason, wk cannot recover from this error. + console.warn("Error when marking buckets as clean. No retry possible. Error:", error); + exceptionDuringMarkBucketsAsNotDirty = true; + throw error; + } + + yield* call(toggleErrorHighlighting, false); + return saveQueue.length; + } catch (error) { + if (exceptionDuringMarkBucketsAsNotDirty) { + throw error; + } + + console.warn("Error during saving. Will retry. Error:", error); + const controlMode = yield* select((state) => state.temporaryConfiguration.controlMode); + const isViewOrSandboxMode = + controlMode === ControlModeEnum.VIEW || controlMode === ControlModeEnum.SANDBOX; + + if (!isViewOrSandboxMode) { + // Notify user about error unless view or sandbox mode is active. In that case, + // we do not need to show the error as it is not so important and distracts the user. + yield* call(toggleErrorHighlighting, true); + } + + // Log the error to airbrake. Also compactedSaveQueue needs to be within an object + // as otherwise the entries would be spread by the notify function.
+ // @ts-ignore + yield* call({ context: ErrorHandling, fn: ErrorHandling.notify }, error, { + compactedSaveQueue, + retryCount, + }); + + // @ts-ignore + if (error.status === 409) { + // HTTP Code 409 'conflict' for dirty state + // @ts-ignore + window.onbeforeunload = null; + yield* call( + [ErrorHandling, ErrorHandling.notify], + new Error("Saving failed due to '409' status code"), + ); + + yield* call(alert, messages["save.failed_simultaneous_tracing"]); + location.reload(); + + throw new Error("Saving failed due to conflict."); + } + + yield* race({ + timeout: delay(getRetryWaitTime(retryCount)), + forcePush: take("SAVE_NOW"), + }); + retryCount++; + } + } +} + +function* markBucketsAsNotDirty(saveQueue: Array) { + const getLayerAndMagInfoForTracingId = memoizeOne((tracingId: string) => { + const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); + const segmentationMagInfo = getMagInfo(segmentationLayer.mags); + return [segmentationLayer, segmentationMagInfo] as const; + }); + for (const saveEntry of saveQueue) { + for (const updateAction of saveEntry.actions) { + if (updateAction.name === "updateBucket") { + const { actionTracingId: tracingId } = updateAction.value; + const [segmentationLayer, segmentationMagInfo] = getLayerAndMagInfoForTracingId(tracingId); + + const { position, mag, additionalCoordinates } = updateAction.value; + const magIndex = segmentationMagInfo.getIndexByMag(mag); + const zoomedBucketAddress = globalPositionToBucketPosition( + position, + segmentationMagInfo.getDenseMags(), + magIndex, + additionalCoordinates, + ); + const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); + + if (bucket.type === "null") { + continue; + } + + bucket.dirtyCount--; + + if (bucket.dirtyCount === 0) { + bucket.markAsPushed(); + } + } + } + } +} + +export function toggleErrorHighlighting(state: boolean, permanentError: boolean = false): void { + if (document.body != null) { + document.body.classList.toggle("save-error", 
state); + } + + const message = permanentError ? messages["save.failed.permanent"] : messages["save.failed"]; + + if (state) { + Toast.error(message, { + sticky: true, + }); + } else { + Toast.close(message); + } +} + +// This function returns the first n batches of the provided array, so that the count of +// all actions in these n batches does not exceed MAXIMUM_ACTION_COUNT_PER_SAVE +function sliceAppropriateBatchCount(batches: Array): Array { + const slicedBatches = []; + let actionCount = 0; + + for (const batch of batches) { + const newActionCount = actionCount + batch.actions.length; + + if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE) { + actionCount = newActionCount; + slicedBatches.push(batch); + } else { + break; + } + } + + return slicedBatches; +} + +export function addVersionNumbers( + updateActionsBatches: Array, + lastVersion: number, +): [Array, number] { + let versionIncrement = 0; + const batchesWithVersions = updateActionsBatches.map((batch) => { + if (batch.transactionGroupIndex === 0) { + versionIncrement++; + } + return { ...batch, version: lastVersion + versionIncrement }; + }); + return [batchesWithVersions, versionIncrement]; +} diff --git a/frontend/javascripts/viewer/model/sagas/saving/save_queue_filling.ts b/frontend/javascripts/viewer/model/sagas/saving/save_queue_filling.ts new file mode 100644 index 00000000000..f25c68947a3 --- /dev/null +++ b/frontend/javascripts/viewer/model/sagas/saving/save_queue_filling.ts @@ -0,0 +1,191 @@ +/* + * This module contains the sagas responsible for populating the save queue + * with update actions that need to be saved to the server. Note that for proofreading, + * the proofreading saga is directly responsible for filling the queue. 
+ */ + +import { buffers } from "redux-saga"; +import { actionChannel, call, put, race, take } from "typed-redux-saga"; +import { selectTracing } from "viewer/model/accessors/tracing_accessor"; +import { FlycamActions } from "viewer/model/actions/flycam_actions"; +import { + type EnsureTracingsWereDiffedToSaveQueueAction, + pushSaveQueueTransaction, +} from "viewer/model/actions/save_actions"; +import type { InitializeSkeletonTracingAction } from "viewer/model/actions/skeletontracing_actions"; +import { SkeletonTracingSaveRelevantActions } from "viewer/model/actions/skeletontracing_actions"; +import { ViewModeSaveRelevantActions } from "viewer/model/actions/view_mode_actions"; +import { + type InitializeVolumeTracingAction, + VolumeTracingSaveRelevantActions, +} from "viewer/model/actions/volumetracing_actions"; +import compactUpdateActions from "viewer/model/helpers/compaction/compact_update_actions"; +import type { Saga } from "viewer/model/sagas/effect-generators"; +import { select } from "viewer/model/sagas/effect-generators"; +import { ensureWkReady } from "viewer/model/sagas/ready_sagas"; +import { diffSkeletonTracing } from "viewer/model/sagas/skeletontracing_saga"; +import { + type UpdateActionWithoutIsolationRequirement, + updateCameraAnnotation, + updateTdCamera, +} from "viewer/model/sagas/volume/update_actions"; +import { diffVolumeTracing } from "viewer/model/sagas/volumetracing_saga"; +import type { CameraData, Flycam, SkeletonTracing, VolumeTracing } from "viewer/store"; +import { getFlooredPosition, getRotation } from "../../accessors/flycam_accessor"; +import type { Action } from "../../actions/actions"; +import type { BatchedAnnotationInitializationAction } from "../../actions/annotation_actions"; + +export function* setupSavingForAnnotation( + _action: BatchedAnnotationInitializationAction, +): Saga { + yield* call(ensureWkReady); + + while (true) { + let prevFlycam = yield* select((state) => state.flycam); + let prevTdCamera = yield* 
select((state) => state.viewModeData.plane.tdCamera); + yield* take([ + ...FlycamActions, + ...ViewModeSaveRelevantActions, + ...SkeletonTracingSaveRelevantActions, + ]); + // The allowUpdate setting could have changed in the meantime + const allowUpdate = yield* select( + (state) => + state.annotation.restrictions.allowUpdate && state.annotation.restrictions.allowSave, + ); + if (!allowUpdate) continue; + const flycam = yield* select((state) => state.flycam); + const tdCamera = yield* select((state) => state.viewModeData.plane.tdCamera); + + const items = Array.from( + yield* call(performDiffAnnotation, prevFlycam, flycam, prevTdCamera, tdCamera), + ); + + if (items.length > 0) { + yield* put(pushSaveQueueTransaction(items)); + } + + prevFlycam = flycam; + prevTdCamera = tdCamera; + } +} + +export function* setupSavingForTracingType( + initializeAction: InitializeSkeletonTracingAction | InitializeVolumeTracingAction, +): Saga { + /* + Listen to changes to the annotation and derive UpdateActions from the + old and new state. + The actual push to the server is done by the forked pushSaveQueueAsync saga. + */ + const tracingType = + initializeAction.type === "INITIALIZE_SKELETONTRACING" ? "skeleton" : "volume"; + const tracingId = initializeAction.tracing.id; + let prevTracing = (yield* select((state) => selectTracing(state, tracingType, tracingId))) as + | VolumeTracing + | SkeletonTracing; + + yield* call(ensureWkReady); + + const actionBuffer = buffers.expanding(); + const tracingActionChannel = yield* actionChannel( + tracingType === "skeleton" + ? [ + ...SkeletonTracingSaveRelevantActions, + // SET_SKELETON_TRACING is not included in SkeletonTracingSaveRelevantActions, because it is used by Undo/Redo and + // should not create its own Undo/Redo stack entry + "SET_SKELETON_TRACING", + ] + : VolumeTracingSaveRelevantActions, + actionBuffer, + ); + + // See Model.ensureSavedState for an explanation of this action channel. 
+ const ensureDiffedChannel = yield* actionChannel( + "ENSURE_TRACINGS_WERE_DIFFED_TO_SAVE_QUEUE", + ); + + while (true) { + // Prioritize consumption of tracingActionChannel since we don't want to + // reply to the ENSURE_TRACINGS_WERE_DIFFED_TO_SAVE_QUEUE action if there + // are unprocessed user actions. + if (!actionBuffer.isEmpty()) { + yield* take(tracingActionChannel); + } else { + // Wait for either a user action or the "ensureAction". + const { ensureAction } = yield* race({ + _tracingAction: take(tracingActionChannel), + ensureAction: take(ensureDiffedChannel), + }); + if (ensureAction != null) { + ensureAction.callback(tracingId); + continue; + } + } + + // The allowUpdate setting could have changed in the meantime + const allowUpdate = yield* select( + (state) => + state.annotation.restrictions.allowUpdate && state.annotation.restrictions.allowSave, + ); + if (!allowUpdate) continue; + const tracing = (yield* select((state) => selectTracing(state, tracingType, tracingId))) as + | VolumeTracing + | SkeletonTracing; + + const items = compactUpdateActions( + Array.from(yield* call(performDiffTracing, prevTracing, tracing)), + prevTracing, + tracing, + ); + + if (items.length > 0) { + yield* put(pushSaveQueueTransaction(items)); + } + + prevTracing = tracing; + } +} + +export function performDiffTracing( + prevTracing: SkeletonTracing | VolumeTracing, + tracing: SkeletonTracing | VolumeTracing, +): Array { + let actions: Array = []; + + if (prevTracing.type === "skeleton" && tracing.type === "skeleton") { + actions = actions.concat(Array.from(diffSkeletonTracing(prevTracing, tracing))); + } + + if (prevTracing.type === "volume" && tracing.type === "volume") { + actions = actions.concat(Array.from(diffVolumeTracing(prevTracing, tracing))); + } + + return actions; +} + +export function performDiffAnnotation( + prevFlycam: Flycam, + flycam: Flycam, + prevTdCamera: CameraData, + tdCamera: CameraData, +): Array { + let actions: Array = []; + + if (prevFlycam !== 
flycam) { + actions = actions.concat( + updateCameraAnnotation( + getFlooredPosition(flycam), + flycam.additionalCoordinates, + getRotation(flycam), + flycam.zoomStep, + ), + ); + } + + if (prevTdCamera !== tdCamera) { + actions = actions.concat(updateTdCamera()); + } + + return actions; +} diff --git a/frontend/javascripts/viewer/model/sagas/saving/save_saga.ts b/frontend/javascripts/viewer/model/sagas/saving/save_saga.ts new file mode 100644 index 00000000000..e92be39f1e6 --- /dev/null +++ b/frontend/javascripts/viewer/model/sagas/saving/save_saga.ts @@ -0,0 +1,389 @@ +import { getUpdateActionLog } from "admin/rest_api"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import { sleep } from "libs/utils"; +import _ from "lodash"; +import { call, fork, put, takeEvery } from "typed-redux-saga"; +import type { APIUpdateActionBatch } from "types/api_types"; +import { getLayerByName, getMappingInfo } from "viewer/model/accessors/dataset_accessor"; +import { setVersionNumberAction } from "viewer/model/actions/save_actions"; +import { applySkeletonUpdateActionsFromServerAction } from "viewer/model/actions/skeletontracing_actions"; +import { applyVolumeUpdateActionsFromServerAction } from "viewer/model/actions/volumetracing_actions"; +import { globalPositionToBucketPositionWithMag } from "viewer/model/helpers/position_converter"; +import type { Saga } from "viewer/model/sagas/effect-generators"; +import { select } from "viewer/model/sagas/effect-generators"; +import { ensureWkReady } from "viewer/model/sagas/ready_sagas"; +import { Model } from "viewer/singletons"; +import type { SkeletonTracing, VolumeTracing } from "viewer/store"; +import { takeEveryWithBatchActionSupport } from "../saga_helpers"; +import { updateLocalHdf5Mapping } from "../volume/mapping_saga"; +import { + removeAgglomerateFromActiveMapping, + updateMappingWithMerge, +} from "../volume/proofread_saga"; +import { pushSaveQueueAsync } from "./save_queue_draining"; 
+import { setupSavingForAnnotation, setupSavingForTracingType } from "./save_queue_filling"; + +export function* setupSavingToServer(): Saga { + // This saga continuously drains the save queue by sending its content to the server. + yield* fork(pushSaveQueueAsync); + // The following sagas are responsible for filling the save queue with the update actions. + yield* takeEvery("INITIALIZE_ANNOTATION_WITH_TRACINGS", setupSavingForAnnotation); + yield* takeEveryWithBatchActionSupport("INITIALIZE_SKELETONTRACING", setupSavingForTracingType); + yield* takeEveryWithBatchActionSupport("INITIALIZE_VOLUMETRACING", setupSavingForTracingType); +} + +// todop: restore to 10, 60, 30 ? +const VERSION_POLL_INTERVAL_COLLAB = 1 * 1000; +const VERSION_POLL_INTERVAL_READ_ONLY = 1 * 1000; +const VERSION_POLL_INTERVAL_SINGLE_EDITOR = 1 * 1000; + +function* watchForSaveConflicts(): Saga { + function* checkForNewVersion(): Saga { + /* + * Checks whether there is a newer version on the server. If so, + * the saga tries to also update the current annotation to the newest + * state. + * If the update is not possible, the user will be notified that a newer + * version exists on the server. In that case, true will be returned (`didAskUserToRefreshPage`). + */ + const allowSave = yield* select( + (state) => + state.annotation.restrictions.allowSave && state.annotation.restrictions.allowUpdate, + ); + + // todop + if (false && allowSave) { + // The active user is currently the only one that is allowed to mutate the annotation. + // Since we only acquire the mutex upon page load, there shouldn't be any unseen updates + // between the page load and this check here.
+ // A race condition where + // 1) another user saves version X + // 2) we load the annotation but only get see version X - 1 (this is the race) + // 3) we acquire a mutex + // should not occur, because there is a grace period for which the mutex has to be free until it can + // be acquired again (see annotation.mutex.expiryTime in application.conf). + // The downside of an early return here is that we won't be able to warn the user early + // if the user opened the annotation in two tabs and mutated it there. + // However, + // a) this scenario is pretty rare and the worst case is that they get a 409 error + // during saving and + // b) checking for newer versions when the active user may update the annotation introduces + // a race condition between this saga and the actual save saga. Synchronizing these sagas + // would be possible, but would add further complexity to the mission critical save saga. + return false; + } + + const maybeSkeletonTracing = yield* select((state) => state.annotation.skeleton); + const volumeTracings = yield* select((state) => state.annotation.volumes); + const tracingStoreUrl = yield* select((state) => state.annotation.tracingStore.url); + const annotationId = yield* select((state) => state.annotation.annotationId); + + const tracings: Array = _.compact([ + ...volumeTracings, + maybeSkeletonTracing, + ]); + + if (tracings.length === 0) { + return false; + } + + const versionOnClient = yield* select((state) => { + return state.annotation.version; + }); + + // Fetch all update actions that belong to a version that is newer than + // versionOnClient. If there are none, the array will be empty. + // The order is ascending in the version number ([v_n, v_(n+1), ...]). 
+ const newerActions = yield* call( + getUpdateActionLog, + tracingStoreUrl, + annotationId, + versionOnClient + 1, + undefined, + true, + ); + + const toastKey = "save_conflicts_warning"; + if (newerActions.length > 0) { + try { + if ((yield* tryToIncorporateActions(newerActions)).success) { + return false; + } + } catch (exc) { + // Afterwards, the user will be asked to reload the page. + console.error("Error during application of update actions", exc); + } + + const saveQueue = yield* select((state) => state.save.queue); + + let msg = ""; + if (!allowSave) { + msg = + "A newer version of this annotation was found on the server. Reload the page to see the newest changes."; + } else if (saveQueue.length > 0) { + msg = + "A newer version of this annotation was found on the server. Your current changes to this annotation cannot be saved anymore."; + } else { + msg = + "A newer version of this annotation was found on the server. Please reload the page to see the newer version. Otherwise, changes to the annotation cannot be saved anymore."; + } + Toast.warning(msg, { + sticky: true, + key: toastKey, + }); + return true; + } else { + Toast.close(toastKey); + } + return false; + } + + function* getPollInterval(): Saga { + const allowSave = yield* select((state) => state.annotation.restrictions.allowSave); + if (!allowSave) { + // The current user may not edit/save the annotation. + return VERSION_POLL_INTERVAL_READ_ONLY; + } + + const othersMayEdit = yield* select((state) => state.annotation.othersMayEdit); + if (othersMayEdit) { + // Other users may edit the annotation. + return VERSION_POLL_INTERVAL_COLLAB; + } + + // The current user is the only one who can edit the annotation. 
+ return VERSION_POLL_INTERVAL_SINGLE_EDITOR; + } + + yield* call(ensureWkReady); + + while (true) { + const interval = yield* call(getPollInterval); + yield* call(sleep, interval); + if (yield* select((state) => state.uiInformation.showVersionRestore)) { + continue; + } + try { + const didAskUserToRefreshPage = yield* call(checkForNewVersion); + if (didAskUserToRefreshPage) { + // The user was already notified about the current annotation being outdated. + // There is not much else we can do now. Sleep for 5 minutes. + yield* call(sleep, 5 * 60 * 1000); + } + } catch (exception) { + // If the version check fails for some reason, we don't want to crash the entire + // saga. + console.warn(exception); + // @ts-ignore + ErrorHandling.notify(exception); + Toast.error( + "An unrecoverable error occurred while synchronizing this annotation. Please refresh the page.", + ); + // A hard error was thrown. Terminate this saga. + break; + } + } +} + +export function* tryToIncorporateActions( + newerActions: APIUpdateActionBatch[], +): Saga<{ success: boolean }> { + // After all actions were incorporated, volume buckets and hdf5 mappings + // are reloaded (if they exist and necessary). This is done as a + // "finalization step", because it requires that the newest version is set + // in the store annotation. Also, it only needs to happen once (instead of + // per action). 
+ const updateLocalHdf5FunctionByTracing: Record unknown> = {}; + const refreshLayerFunctionByTracing: Record unknown> = {}; + function* finalize() { + for (const fn of Object.values(updateLocalHdf5FunctionByTracing).concat( + Object.values(refreshLayerFunctionByTracing), + )) { + yield* call(fn); + } + } + for (const actionBatch of newerActions) { + for (const action of actionBatch.value) { + switch (action.name) { + ///////////// + // Updates to user-specific state can be ignored: + // Camera + case "updateCamera": + case "updateTdCamera": + // Active items + case "updateActiveNode": + case "updateActiveSegmentId": + // Visibilities + case "updateTreeVisibility": + case "updateTreeGroupVisibility": + case "updateSegmentVisibility": + case "updateSegmentGroupVisibility": + case "updateUserBoundingBoxVisibilityInSkeletonTracing": + case "updateUserBoundingBoxVisibilityInVolumeTracing": + // Group expansion + case "updateTreeGroupsExpandedState": + case "updateSegmentGroupsExpandedState": { + break; + } + ///////////// + // Skeleton + ///////////// + case "createTree": + case "updateTree": + case "createNode": + case "createEdge": + case "updateNode": + case "moveTreeComponent": + case "deleteTree": + case "deleteEdge": + case "deleteNode": + case "updateTreeEdgesVisibility": + case "updateTreeGroups": + // Skeleton User Bounding Boxes + case "addUserBoundingBoxInSkeletonTracing": + case "updateUserBoundingBoxInSkeletonTracing": + case "deleteUserBoundingBoxInSkeletonTracing": { + yield* put(applySkeletonUpdateActionsFromServerAction([action])); + break; + } + + ///////////// + // Volume + ///////////// + case "updateBucket": { + const { value } = action; + const cube = Model.getCubeByLayerName(value.actionTracingId); + + const dataLayer = Model.getLayerByName(value.actionTracingId); + const bucketAddress = globalPositionToBucketPositionWithMag( + value.position, + value.mag, + value.additionalCoordinates, + ); + + const bucket = cube.getBucket(bucketAddress); + if 
(bucket != null && bucket.type !== "null") { + cube.removeBucket(bucket); + refreshLayerFunctionByTracing[value.actionTracingId] = () => { + dataLayer.layerRenderingManager.refresh(); + }; + } + break; + } + case "deleteSegmentData": { + const { value } = action; + const { actionTracingId, id } = value; + const cube = Model.getCubeByLayerName(actionTracingId); + const dataLayer = Model.getLayerByName(actionTracingId); + + cube.removeBucketsIf((bucket) => bucket.containsValue(id)); + refreshLayerFunctionByTracing[value.actionTracingId] = () => { + dataLayer.layerRenderingManager.refresh(); + }; + break; + } + case "updateLargestSegmentId": + case "createSegment": + case "deleteSegment": + case "updateSegment": + case "updateSegmentGroups": + // Volume User Bounding Boxes + case "addUserBoundingBoxInVolumeTracing": + case "deleteUserBoundingBoxInVolumeTracing": + case "updateUserBoundingBoxInVolumeTracing": { + yield* put(applyVolumeUpdateActionsFromServerAction([action])); + break; + } + + // Proofreading + case "mergeAgglomerate": { + const activeMapping = yield* select( + (store) => + store.temporaryConfiguration.activeMappingByLayer[action.value.actionTracingId], + ); + yield* call( + updateMappingWithMerge, + action.value.actionTracingId, + activeMapping, + action.value.agglomerateId1, + action.value.agglomerateId2, + ); + break; + } + case "splitAgglomerate": { + const activeMapping = yield* select( + (store) => + store.temporaryConfiguration.activeMappingByLayer[action.value.actionTracingId], + ); + yield* call( + removeAgglomerateFromActiveMapping, + action.value.actionTracingId, + activeMapping, + action.value.agglomerateId, + ); + + const layerName = action.value.actionTracingId; + + const mappingInfo = yield* select((state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, layerName), + ); + const { mappingName } = mappingInfo; + + if (mappingName == null) { + throw new Error( + "Could not apply splitAgglomerate because no active 
mapping was found.", + ); + } + + const dataset = yield* select((state) => state.dataset); + const layerInfo = getLayerByName(dataset, layerName); + + updateLocalHdf5FunctionByTracing[layerName] = function* () { + yield* call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName); + }; + + break; + } + + /* + * Currently NOT supported: + */ + + // High-level annotation specific + case "addLayerToAnnotation": + case "addSegmentIndex": + case "createTracing": + case "deleteLayerFromAnnotation": + case "importVolumeTracing": + case "revertToVersion": + case "updateLayerMetadata": + case "updateMetadataOfAnnotation": + + // Volume + case "removeFallbackLayer": + case "updateMappingName": // Refactor mapping activation first before implementing this. + + // Legacy! The following actions are legacy actions and don't + // need to be supported. + case "mergeTree": + case "updateSkeletonTracing": + case "updateVolumeTracing": + case "updateUserBoundingBoxesInSkeletonTracing": + case "updateUserBoundingBoxesInVolumeTracing": { + console.log("Cannot apply action", action.name); + yield* call(finalize); + return { success: false }; + } + default: { + action satisfies never; + } + } + } + yield* put(setVersionNumberAction(actionBatch.version)); + } + yield* call(finalize); + return { success: true }; +} + +export default [setupSavingToServer, watchForSaveConflicts]; diff --git a/frontend/javascripts/viewer/model/sagas/save_saga_constants.ts b/frontend/javascripts/viewer/model/sagas/saving/save_saga_constants.ts similarity index 91% rename from frontend/javascripts/viewer/model/sagas/save_saga_constants.ts rename to frontend/javascripts/viewer/model/sagas/saving/save_saga_constants.ts index dbc7dba6729..0f9a230704e 100644 --- a/frontend/javascripts/viewer/model/sagas/save_saga_constants.ts +++ b/frontend/javascripts/viewer/model/sagas/saving/save_saga_constants.ts @@ -1,6 +1,7 @@ // The save saga uses a retry mechanism which is based // on exponential back-off. 
-export const PUSH_THROTTLE_TIME = 30000; // 30s +// todop: restore to 30s +export const PUSH_THROTTLE_TIME = 5000; // 30s export const SAVE_RETRY_WAITING_TIME = 2000; export const MAX_SAVE_RETRY_WAITING_TIME = 300000; // 5m diff --git a/frontend/javascripts/viewer/model/sagas/settings_saga.ts b/frontend/javascripts/viewer/model/sagas/settings_saga.ts index 78517a6558d..d1ad1e9702e 100644 --- a/frontend/javascripts/viewer/model/sagas/settings_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/settings_saga.ts @@ -13,7 +13,7 @@ import { type Saga, select, take } from "viewer/model/sagas/effect-generators"; import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, -} from "viewer/model/sagas/save_saga_constants"; +} from "viewer/model/sagas/saving/save_saga_constants"; import type { DatasetConfiguration, DatasetLayerConfiguration } from "viewer/store"; import { Toolkit } from "../accessors/tool_accessor"; import { ensureWkReady } from "./ready_sagas"; diff --git a/frontend/javascripts/viewer/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/viewer/model/sagas/skeletontracing_saga.ts index 8c17a138cad..a297aaa87a8 100644 --- a/frontend/javascripts/viewer/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/skeletontracing_saga.ts @@ -55,7 +55,7 @@ import { } from "viewer/model/reducers/skeletontracing_reducer_helpers"; import type { Saga } from "viewer/model/sagas/effect-generators"; import { select } from "viewer/model/sagas/effect-generators"; -import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "viewer/model/sagas/volume/update_actions"; import { createEdge, createNode, @@ -70,7 +70,7 @@ import { updateTreeGroups, updateTreeGroupsExpandedState, updateTreeVisibility, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import { api } from "viewer/singletons"; import type { 
SkeletonTracing, WebknossosState } from "viewer/store"; import Store from "viewer/store"; diff --git a/frontend/javascripts/viewer/model/sagas/undo_saga.ts b/frontend/javascripts/viewer/model/sagas/undo_saga.ts index f50666c591a..7264d24532f 100644 --- a/frontend/javascripts/viewer/model/sagas/undo_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/undo_saga.ts @@ -38,7 +38,7 @@ import { } from "viewer/model/actions/volumetracing_actions"; import type { Saga } from "viewer/model/sagas/effect-generators"; import { select } from "viewer/model/sagas/effect-generators"; -import { UNDO_HISTORY_SIZE } from "viewer/model/sagas/save_saga_constants"; +import { UNDO_HISTORY_SIZE } from "viewer/model/sagas/saving/save_saga_constants"; import { Model } from "viewer/singletons"; import type { SegmentGroup, SegmentMap, SkeletonTracing, UserBoundingBox } from "viewer/store"; import type BucketSnapshot from "../bucket_data_handling/bucket_snapshot"; diff --git a/frontend/javascripts/viewer/model/sagas/volume/floodfill_saga.tsx b/frontend/javascripts/viewer/model/sagas/volume/floodfill_saga.tsx index c97cb0c431f..64c69d3380c 100644 --- a/frontend/javascripts/viewer/model/sagas/volume/floodfill_saga.tsx +++ b/frontend/javascripts/viewer/model/sagas/volume/floodfill_saga.tsx @@ -4,14 +4,8 @@ import Toast from "libs/toast"; import * as Utils from "libs/utils"; import _ from "lodash"; import { call, put, takeEvery } from "typed-redux-saga"; -import type { - BoundingBoxType, - FillMode, - LabeledVoxelsMap, - OrthoView, - Vector2, - Vector3, -} from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { FillMode, LabeledVoxelsMap, OrthoView, Vector2, Vector3 } from "viewer/constants"; import Constants, { FillModeEnum, Unicode } from "viewer/constants"; import getSceneController from "viewer/controller/scene_controller_provider"; import { getDatasetBoundingBox, getMagInfo } from "viewer/model/accessors/dataset_accessor"; @@ -132,7 +126,7 @@ 
function* getBoundingBoxForFloodFill( position: Vector3, currentViewport: OrthoView, finestSegmentationLayerMag: Vector3, -): Saga { +): Saga { const isRestrictedToBoundingBox = yield* select( (state) => state.userConfiguration.isFloodfillRestrictedToBoundingBox, ); @@ -346,7 +340,7 @@ function* notifyUserAboutResult( startTimeOfFloodfill: number, progressCallback: ProgressCallback, fillMode: FillMode, - coveredBoundingBox: BoundingBoxType, + coveredBoundingBox: BoundingBoxMinMaxType, oldSegmentIdAtSeed: number, activeCellId: number, seedPosition: Vector3, diff --git a/frontend/javascripts/viewer/model/sagas/mapping_saga.ts b/frontend/javascripts/viewer/model/sagas/volume/mapping_saga.ts similarity index 92% rename from frontend/javascripts/viewer/model/sagas/mapping_saga.ts rename to frontend/javascripts/viewer/model/sagas/volume/mapping_saga.ts index 303cac69709..f3169ad72d2 100644 --- a/frontend/javascripts/viewer/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/mapping_saga.ts @@ -69,15 +69,17 @@ import type { NumberLike, NumberLikeMap, } from "viewer/store"; -import type { Action } from "../actions/actions"; -import { updateSegmentAction } from "../actions/volumetracing_actions"; -import type DataCube from "../bucket_data_handling/data_cube"; -import { listenToStoreProperty } from "../helpers/listener_helpers"; -import { ensureWkReady } from "./ready_sagas"; +import type { Action } from "../../actions/actions"; +import { updateSegmentAction } from "../../actions/volumetracing_actions"; +import type DataCube from "../../bucket_data_handling/data_cube"; +import { listenToStoreProperty } from "../../helpers/listener_helpers"; +import { ensureWkReady } from "../ready_sagas"; type APIMappings = Record; type Container = { value: T }; +const BUCKET_WATCHING_THROTTLE_DELAY = process.env.IS_TESTING ? 
5 : 500; + const takeLatestMappingChange = ( oldActiveMappingByLayer: Container>, layerName: string, @@ -226,38 +228,49 @@ function createBucketRetrievalSourceChannel(layerName: string) { } function* watchChangedBucketsForLayer(layerName: string): Saga { + /* + * This saga listens for changed bucket data and then triggers the updateLocalHdf5Mapping + * saga in an interruptible manner. See comments below for some rationale. + */ const dataCube = yield* call([Model, Model.getCubeByLayerName], layerName); const bucketChannel = yield* call(createBucketDataChangedChannel, dataCube); + // Also update the local hdf5 mapping by inspecting all already existing + // buckets (likely, there are none yet because all buckets were reloaded, but + // it's still safer to do this here). + yield* call(startInterruptibleUpdateMapping); + while (true) { yield take(bucketChannel); - // We received a BUCKET_DATA_CHANGED event. `handler` needs to be invoked. + // We received a BUCKET_DATA_CHANGED event. `startInterruptibleUpdateMapping` needs + // to be invoked. // However, let's throttle¹ this by waiting and then discarding all other events // that might have accumulated in between. - yield* call(sleep, 500); + + yield* call(sleep, BUCKET_WATCHING_THROTTLE_DELAY); yield flush(bucketChannel); - // After flushing and while the handler below is running, + // After flushing and while the startInterruptibleUpdateMapping below is running, // the bucketChannel might fill up again. This means, the // next loop will immediately take from the channel which // is what we need. - yield* call(handler); + yield* call(startInterruptibleUpdateMapping); // Addendum: // ¹ We don't use redux-saga's throttle, because that would - // call `handler` in parallel if enough events are + // call `startInterruptibleUpdateMapping` in parallel if enough events are // consumed over the throttling duration. - // However, running `handler` in parallel would be a waste - // of computation. 
Therefore, we invoke `handler` strictly + // However, running `startInterruptibleUpdateMapping` in parallel would be a waste + // of computation. Therefore, we invoke `startInterruptibleUpdateMapping` strictly // sequentially. } - function* handler() { + function* startInterruptibleUpdateMapping() { const dataset = yield* select((state) => state.dataset); const layerInfo = getLayerByName(dataset, layerName); const mappingInfo = yield* select((state) => getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, layerName), ); - const { mappingName, mappingType, mappingStatus } = mappingInfo; + const { mappingName, mappingStatus } = mappingInfo; if (mappingName == null || mappingStatus !== MappingStatusEnum.ENABLED) { return; @@ -271,7 +284,7 @@ function* watchChangedBucketsForLayer(layerName: string): Saga { let isBusy = yield* select((state) => state.uiInformation.busyBlockingInfo.isBusy); if (!isBusy) { const { cancel } = yield* race({ - updateHdf5: call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName, mappingType), + updateHdf5: call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName), cancel: take( ((action: Action) => action.type === "SET_BUSY_BLOCKING_INFO_ACTION" && @@ -407,7 +420,6 @@ function* handleSetMapping( layerName, layerInfo, mappingName, - mappingType, action, oldActiveMappingByLayer, ); @@ -418,23 +430,21 @@ function* handleSetHdf5Mapping( layerName: string, layerInfo: APIDataLayer, mappingName: string, - mappingType: MappingType, action: SetMappingAction, oldActiveMappingByLayer: Container>, ): Saga { if (yield* select((state) => getNeedsLocalHdf5Mapping(state, layerName))) { - yield* call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName, mappingType); + yield* call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName); } else { // An HDF5 mapping was set that is applied remotely. A reload is necessary. 
yield* call(reloadData, oldActiveMappingByLayer, action); } } -function* updateLocalHdf5Mapping( +export function* updateLocalHdf5Mapping( layerName: string, layerInfo: APIDataLayer, mappingName: string, - mappingType: MappingType, ): Saga { const dataset = yield* select((state) => state.dataset); const annotation = yield* select((state) => state.annotation); @@ -467,6 +477,7 @@ function* updateLocalHdf5Mapping( intersection: mutableRemainingEntries, } = fastDiffSetAndMap(segmentIds as Set, previousMapping); + // todop: does this crash wk if the request fails? const newEntries = editableMapping != null ? yield* call( @@ -500,7 +511,12 @@ function* updateLocalHdf5Mapping( onlyB: newSegmentIds, }); - yield* put(setMappingAction(layerName, mappingName, mappingType, { mapping })); + yield* put(setMappingAction(layerName, mappingName, "HDF5", { mapping })); + if (process.env.IS_TESTING) { + // in test context, the mapping.ts code is not executed (which is usually responsible + // for finishing the initialization). 
+ yield put(finishMappingInitializationAction(layerName)); + } } function* handleSetJsonMapping( diff --git a/frontend/javascripts/viewer/model/sagas/min_cut_saga.ts b/frontend/javascripts/viewer/model/sagas/volume/min_cut_saga.ts similarity index 98% rename from frontend/javascripts/viewer/model/sagas/min_cut_saga.ts rename to frontend/javascripts/viewer/model/sagas/volume/min_cut_saga.ts index 5cf4d437262..9cf7a8ecb0f 100644 --- a/frontend/javascripts/viewer/model/sagas/min_cut_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/min_cut_saga.ts @@ -7,7 +7,8 @@ import _ from "lodash"; import { call, put } from "typed-redux-saga"; import type { APISegmentationLayer } from "types/api_types"; import type { AdditionalCoordinate } from "types/api_types"; -import type { BoundingBoxType, TypedArray, Vector3 } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { TypedArray, Vector3 } from "viewer/constants"; import { getMagInfo } from "viewer/model/accessors/dataset_accessor"; import { enforceActiveVolumeTracing, @@ -17,12 +18,12 @@ import type { Action } from "viewer/model/actions/actions"; import { addUserBoundingBoxAction } from "viewer/model/actions/annotation_actions"; import { finishAnnotationStrokeAction } from "viewer/model/actions/volumetracing_actions"; import BoundingBox from "viewer/model/bucket_data_handling/bounding_box"; +import type { MagInfo } from "viewer/model/helpers/mag_info"; import type { Saga } from "viewer/model/sagas/effect-generators"; import { select } from "viewer/model/sagas/effect-generators"; import { takeEveryUnlessBusy } from "viewer/model/sagas/saga_helpers"; import type { MutableNode, Node } from "viewer/model/types/tree_types"; import { api } from "viewer/singletons"; -import type { MagInfo } from "../helpers/mag_info"; // By default, a new bounding box is created around // the seed nodes with a padding. 
Within the bounding box @@ -170,7 +171,10 @@ function removeOutgoingEdge(edgeBuffer: Uint16Array, idx: number, neighborIdx: n edgeBuffer[idx] &= ~(2 ** neighborIdx); } -export function isBoundingBoxUsableForMinCut(boundingBoxObj: BoundingBoxType, nodes: Array) { +export function isBoundingBoxUsableForMinCut( + boundingBoxObj: BoundingBoxMinMaxType, + nodes: Array, +) { const bbox = new BoundingBox(boundingBoxObj); return ( bbox.containsPoint(nodes[0].untransformedPosition) && diff --git a/frontend/javascripts/viewer/model/sagas/proofread_saga.ts b/frontend/javascripts/viewer/model/sagas/volume/proofread_saga.ts similarity index 92% rename from frontend/javascripts/viewer/model/sagas/proofread_saga.ts rename to frontend/javascripts/viewer/model/sagas/volume/proofread_saga.ts index edd25a2780b..62eb8e7b7fb 100644 --- a/frontend/javascripts/viewer/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/proofread_saga.ts @@ -8,6 +8,7 @@ import { import { V3 } from "libs/mjs"; import Toast from "libs/toast"; import { SoftError, isBigInt, isNumberMap } from "libs/utils"; +import window from "libs/window"; import _ from "lodash"; import { all, call, put, spawn, takeEvery } from "typed-redux-saga"; import type { AdditionalCoordinate, ServerEditableMapping } from "types/api_types"; @@ -68,14 +69,14 @@ import { type UpdateActionWithoutIsolationRequirement, mergeAgglomerate, splitAgglomerate, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import { Model, Store, api } from "viewer/singletons"; import type { ActiveMappingInfo, Mapping, NumberLikeMap, VolumeTracing } from "viewer/store"; -import { getCurrentMag } from "../accessors/flycam_accessor"; -import type { Action } from "../actions/actions"; -import type { Tree } from "../types/tree_types"; -import { ensureWkReady } from "./ready_sagas"; -import { takeEveryUnlessBusy, takeWithBatchActionSupport } from "./saga_helpers"; +import { getCurrentMag } 
from "../../accessors/flycam_accessor"; +import type { Action } from "../../actions/actions"; +import type { Tree } from "../../types/tree_types"; +import { ensureWkReady } from "../ready_sagas"; +import { takeEveryUnlessBusy, takeWithBatchActionSupport } from "../saga_helpers"; function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { return function* (...args: any[]) { @@ -357,6 +358,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { const { agglomerateFileMag, getDataValue, activeMapping, volumeTracing } = preparation; const { tracingId: volumeTracingId } = volumeTracing; + const annotationId = yield* select((state) => state.annotation.annotationId); // Use untransformedPosition because agglomerate trees should not have // any transforms, anyway. if (yield* select((state) => areGeometriesTransformed(state))) { @@ -396,16 +398,12 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { volumeTracingId, ), ); - const mergedMapping = yield* call( - mergeAgglomeratesInMapping, + yield* call( + updateMappingWithMerge, + volumeTracingId, activeMapping, - targetAgglomerateId, sourceAgglomerateId, - ); - yield* put( - setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { - mapping: mergedMapping, - }), + targetAgglomerateId, ); } else if (action.type === "DELETE_EDGE") { if (sourceAgglomerateId !== targetAgglomerateId) { @@ -444,6 +442,8 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); + // todop + // yield* call(releaseAnnotationMutex, annotationId); if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" || action.type === "DELETE_EDGE") { if (sourceAgglomerateId !== targetAgglomerateId) { @@ -693,6 +693,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { const allowUpdate = yield* select((state) => state.annotation.restrictions.allowUpdate); if (!allowUpdate) return; + 
const annotationId = yield* select((state) => state.annotation.annotationId); const preparation = yield* call(prepareSplitOrMerge, false); if (!preparation) { return; @@ -702,6 +703,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { const idInfos = yield* call(gatherInfoForOperation, action, preparation); if (idInfos == null) { + console.warn("[Proofreading] Could not gather id infos."); return; } const [sourceInfo, targetInfo] = idInfos; @@ -741,17 +743,12 @@ function* handleProofreadMergeOrMinCut(action: Action) { sourceInfo.unmappedId, targetInfo.unmappedId, ); - const mergedMapping = yield* call( - mergeAgglomeratesInMapping, + yield* call( + updateMappingWithMerge, + volumeTracingId, activeMapping, - targetAgglomerateId, sourceAgglomerateId, - ); - - yield* put( - setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { - mapping: mergedMapping, - }), + targetAgglomerateId, ); } else if (action.type === "MIN_CUT_AGGLOMERATE") { if (sourceInfo.unmappedId === targetInfo.unmappedId) { @@ -782,6 +779,8 @@ function* handleProofreadMergeOrMinCut(action: Action) { yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); + // todop + // yield* call(releaseAnnotationMutex, annotationId); if (action.type === "MIN_CUT_AGGLOMERATE") { console.log("start updating the mapping after a min-cut"); @@ -886,6 +885,7 @@ function* handleProofreadCutFromNeighbors(action: Action) { const allowUpdate = yield* select((state) => state.annotation.restrictions.allowUpdate); if (!allowUpdate) return; + const annotationId = yield* select((state) => state.annotation.annotationId); const preparation = yield* call(prepareSplitOrMerge, false); if (!preparation) { return; @@ -940,6 +940,8 @@ function* handleProofreadCutFromNeighbors(action: Action) { yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); + // todop + // yield* call(releaseAnnotationMutex, annotationId); // Now that 
the changes are saved, we can split the mapping locally (because it requires // communication with the back-end). @@ -1119,10 +1121,18 @@ function* prepareSplitOrMerge(isSkeletonProofreading: boolean): Saga state.annotation.annotationId); + // const { canEdit } = yield* call(acquireAnnotationMutex, annotationId); + // if (!canEdit) { + // Toast.error("Could not acquire mutex. Somebody else is proofreading at the moment."); + // return null; + // } + return { agglomerateFileMag, getDataValue, @@ -1249,10 +1259,9 @@ function* getPositionForSegmentId(volumeTracing: VolumeTracing, segmentId: numbe return position; } -function* splitAgglomerateInMapping( +function getSegmentIdsThatMapToAgglomerate( activeMapping: ActiveMappingInfo, sourceAgglomerateId: number, - volumeTracingId: string, ) { // Obtain all segment ids that map to sourceAgglomerateId const mappingEntries = Array.from(activeMapping.mapping as NumberLikeMap); @@ -1264,10 +1273,17 @@ function* splitAgglomerateInMapping( // If the mapping contains BigInts, we need a BigInt for the filtering const comparableSourceAgglomerateId = adaptToType(sourceAgglomerateId); - const splitSegmentIds = mappingEntries + return mappingEntries .filter(([_segmentId, agglomerateId]) => agglomerateId === comparableSourceAgglomerateId) .map(([segmentId, _agglomerateId]) => segmentId); +} +function* splitAgglomerateInMapping( + activeMapping: ActiveMappingInfo, + sourceAgglomerateId: number, + volumeTracingId: string, +) { + const splitSegmentIds = getSegmentIdsThatMapToAgglomerate(activeMapping, sourceAgglomerateId); const annotationId = yield* select((state) => state.annotation.annotationId); const tracingStoreUrl = yield* select((state) => state.annotation.tracingStore.url); // Ask the server to map the (split) segment ids. 
This creates a partial mapping @@ -1292,13 +1308,14 @@ function* splitAgglomerateInMapping( return [segmentId, agglomerateId]; }), ) as Mapping; + return splitMapping; } function mergeAgglomeratesInMapping( activeMapping: ActiveMappingInfo, - targetAgglomerateId: number, sourceAgglomerateId: number, + targetAgglomerateId: number, ): Mapping { const adaptToType = activeMapping.mapping && isNumberMap(activeMapping.mapping) @@ -1314,6 +1331,60 @@ function mergeAgglomeratesInMapping( ) as Mapping; } +export function* updateMappingWithMerge( + volumeTracingId: string, + activeMapping: ActiveMappingInfo, + sourceAgglomerateId: number, + targetAgglomerateId: number, +) { + const mergedMapping = yield* call( + mergeAgglomeratesInMapping, + activeMapping, + sourceAgglomerateId, + targetAgglomerateId, + ); + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: mergedMapping, + }), + ); +} + +export function* removeAgglomerateFromActiveMapping( + volumeTracingId: string, + activeMapping: ActiveMappingInfo, + agglomerateId: number, +) { + /* + * This function removes all super-voxels segments from the active mapping + * that map to the specified agglomerateId. + */ + + const mappingEntries = Array.from(activeMapping.mapping as NumberLikeMap); + + const adaptToType = + mappingEntries.length > 0 && isBigInt(mappingEntries[0][0]) + ? 
(el: number) => BigInt(el) + : (el: number) => el; + // If the mapping contains BigInts, we need a BigInt for the filtering + const comparableSourceAgglomerateId = adaptToType(agglomerateId); + + const newMapping = new Map(); + + for (const entry of mappingEntries) { + const [key, value] = entry; + if (value !== comparableSourceAgglomerateId) { + newMapping.set(key, value); + } + } + + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: newMapping, + }), + ); +} + function* gatherInfoForOperation( action: ProofreadMergeAction | MinCutAgglomerateWithPositionAction, preparation: Preparation, @@ -1324,13 +1395,22 @@ function* gatherInfoForOperation( }> | null> { const { volumeTracing } = preparation; const { tracingId: volumeTracingId, activeCellId, activeUnmappedSegmentId } = volumeTracing; - if (activeCellId === 0) return null; + if (activeCellId === 0) { + console.warn("[Proofreading] Cannot execute operation because active segment id is 0"); + return null; + } const segments = yield* select((store) => getSegmentsForLayer(store, volumeTracingId)); const activeSegment = segments.getNullable(activeCellId); - if (activeSegment == null) return null; + if (activeSegment == null) { + console.warn("[Proofreading] Cannot execute operation because no active segment item exists"); + return null; + } const activeSegmentPositionFloat = activeSegment.somePosition; - if (activeSegmentPositionFloat == null) return null; + if (activeSegmentPositionFloat == null) { + console.warn("[Proofreading] Cannot execute operation because active segment has no position"); + return null; + } const activeSegmentPosition = V3.floor(activeSegmentPositionFloat); @@ -1354,6 +1434,9 @@ function* gatherInfoForOperation( targetPosition, ]); if (idInfos == null) { + console.warn( + "[Proofreading] Cannot execute operation because agglomerate infos couldn't be determined for source and target position.", + ); return null; } const [idInfo1, 
idInfo2] = idInfos; diff --git a/frontend/javascripts/viewer/model/sagas/quick_select_heuristic_saga.ts b/frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_heuristic_saga.ts similarity index 97% rename from frontend/javascripts/viewer/model/sagas/quick_select_heuristic_saga.ts rename to frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_heuristic_saga.ts index 6c775580077..66af30f6c3d 100644 --- a/frontend/javascripts/viewer/model/sagas/quick_select_heuristic_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_heuristic_saga.ts @@ -21,10 +21,24 @@ import ndarray from "ndarray"; import { call, put, race, take } from "typed-redux-saga"; import type { APIDataLayer, APIDataset } from "types/api_types"; import type { QuickSelectGeometry } from "viewer/geometries/helper_geometries"; +import { + getDefaultValueRangeOfLayer, + getEnabledColorLayers, + getLayerBoundingBox, + getMagInfo, +} from "viewer/model/accessors/dataset_accessor"; +import { getTransformsForLayer } from "viewer/model/accessors/dataset_layer_transformation_accessor"; +import { getActiveMagIndexForLayer } from "viewer/model/accessors/flycam_accessor"; import { getActiveSegmentationTracing, getSegmentationLayerForTracing, } from "viewer/model/accessors/volumetracing_accessor"; +import { updateUserSettingAction } from "viewer/model/actions/settings_actions"; +import { + type EnterAction, + type EscapeAction, + showQuickSelectSettingsAction, +} from "viewer/model/actions/ui_actions"; import { type CancelQuickSelectAction, type ComputeQuickSelectForPointAction, @@ -36,6 +50,7 @@ import { updateSegmentAction, } from "viewer/model/actions/volumetracing_actions"; import BoundingBox from "viewer/model/bucket_data_handling/bounding_box"; +import Dimensions, { type DimensionIndices } from "viewer/model/dimensions"; import type { Saga } from "viewer/model/sagas/effect-generators"; import { select } from 
"viewer/model/sagas/effect-generators"; import { api } from "viewer/singletons"; @@ -45,23 +60,8 @@ import type { VolumeTracing, WebknossosState, } from "viewer/store"; -import { - getDefaultValueRangeOfLayer, - getEnabledColorLayers, - getLayerBoundingBox, - getMagInfo, -} from "../accessors/dataset_accessor"; -import { getTransformsForLayer } from "../accessors/dataset_layer_transformation_accessor"; -import { getActiveMagIndexForLayer } from "../accessors/flycam_accessor"; -import { updateUserSettingAction } from "../actions/settings_actions"; -import { - type EnterAction, - type EscapeAction, - showQuickSelectSettingsAction, -} from "../actions/ui_actions"; -import Dimensions, { type DimensionIndices } from "../dimensions"; -import { createVolumeLayer, labelWithVoxelBuffer2D } from "./volume/helpers"; -import { copyNdArray } from "./volume/volume_interpolation_saga"; +import { createVolumeLayer, labelWithVoxelBuffer2D } from "../helpers"; +import { copyNdArray } from "../volume_interpolation_saga"; const TOAST_KEY = "QUICKSELECT_PREVIEW_MESSAGE"; diff --git a/frontend/javascripts/viewer/model/sagas/quick_select_ml_saga.ts b/frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_ml_saga.ts similarity index 98% rename from frontend/javascripts/viewer/model/sagas/quick_select_ml_saga.ts rename to frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_ml_saga.ts index c9afb6ec3f0..49d8389c228 100644 --- a/frontend/javascripts/viewer/model/sagas/quick_select_ml_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_ml_saga.ts @@ -17,9 +17,9 @@ import BoundingBox from "viewer/model/bucket_data_handling/bounding_box"; import type { Saga } from "viewer/model/sagas/effect-generators"; import { select } from "viewer/model/sagas/effect-generators"; import type { WebknossosState } from "viewer/store"; -import { getPlaneExtentInVoxelFromStore } from "../accessors/view_mode_accessor"; -import { 
setGlobalProgressAction } from "../actions/ui_actions"; -import Dimensions from "../dimensions"; +import { getPlaneExtentInVoxelFromStore } from "../../../accessors/view_mode_accessor"; +import { setGlobalProgressAction } from "../../../actions/ui_actions"; +import Dimensions from "../../../dimensions"; import { finalizeQuickSelectForSlice, prepareQuickSelect } from "./quick_select_heuristic_saga"; const MAXIMUM_MASK_BASE = 1024; diff --git a/frontend/javascripts/viewer/model/sagas/quick_select_saga.ts b/frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_saga.ts similarity index 94% rename from frontend/javascripts/viewer/model/sagas/quick_select_saga.ts rename to frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_saga.ts index a1ca2b6c11e..324165db311 100644 --- a/frontend/javascripts/viewer/model/sagas/quick_select_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/quick_select/quick_select_saga.ts @@ -11,11 +11,11 @@ import { type Saga, select } from "viewer/model/sagas/effect-generators"; import getSceneController from "viewer/controller/scene_controller_provider"; import type { VolumeTracing } from "viewer/store"; -import { getActiveSegmentationTracing } from "../accessors/volumetracing_accessor"; -import { setBusyBlockingInfoAction, setQuickSelectStateAction } from "../actions/ui_actions"; +import { getActiveSegmentationTracing } from "../../../accessors/volumetracing_accessor"; +import { setBusyBlockingInfoAction, setQuickSelectStateAction } from "../../../actions/ui_actions"; +import { requestBucketModificationInVolumeTracing } from "../../saga_helpers"; import performQuickSelectHeuristic from "./quick_select_heuristic_saga"; import performQuickSelectML from "./quick_select_ml_saga"; -import { requestBucketModificationInVolumeTracing } from "./saga_helpers"; function* shouldUseHeuristic() { const useHeuristic = yield* select((state) => state.userConfiguration.quickSelect.useHeuristic); diff --git 
a/frontend/javascripts/viewer/model/sagas/update_actions.ts b/frontend/javascripts/viewer/model/sagas/volume/update_actions.ts similarity index 91% rename from frontend/javascripts/viewer/model/sagas/update_actions.ts rename to frontend/javascripts/viewer/model/sagas/volume/update_actions.ts index a68b389b5cf..90db0ec02e7 100644 --- a/frontend/javascripts/viewer/model/sagas/update_actions.ts +++ b/frontend/javascripts/viewer/model/sagas/volume/update_actions.ts @@ -5,6 +5,7 @@ import type { SendBucketInfo } from "viewer/model/bucket_data_handling/wkstore_a import { convertUserBoundingBoxFromFrontendToServer } from "viewer/model/reducers/reducer_helpers"; import type { Node, Tree, TreeGroup } from "viewer/model/types/tree_types"; import type { + BoundingBoxObject, NumberLike, SegmentGroup, UserBoundingBox, @@ -22,7 +23,7 @@ type PartialBoundingBoxWithoutVisibility = Partial | ReturnType; export type DeleteTreeUpdateAction = ReturnType; export type MoveTreeComponentUpdateAction = ReturnType; -export type MergeTreeUpdateAction = ReturnType; +export type LEGACY_MergeTreeUpdateAction = ReturnType; export type CreateNodeUpdateAction = ReturnType; export type UpdateNodeUpdateAction = ReturnType; export type UpdateTreeVisibilityUpdateAction = ReturnType; @@ -105,13 +106,42 @@ export type UpdateAction = | UpdateActionWithoutIsolationRequirement | UpdateActionWithIsolationRequirement; +export type ApplicableSkeletonUpdateAction = + | UpdateTreeUpdateAction + | UpdateNodeUpdateAction + | CreateNodeUpdateAction + | CreateEdgeUpdateAction + | DeleteTreeUpdateAction + | DeleteEdgeUpdateAction + | DeleteNodeUpdateAction + | MoveTreeComponentUpdateAction + | UpdateTreeEdgesVisibilityUpdateAction + | UpdateTreeGroupsUpdateAction + | UpdateTreeGroupsExpandedStateAction + | AddUserBoundingBoxInSkeletonTracingAction + | UpdateUserBoundingBoxInSkeletonTracingAction + | UpdateUserBoundingBoxVisibilityInSkeletonTracingAction + | DeleteUserBoundingBoxInSkeletonTracingAction; + +export 
type ApplicableVolumeUpdateAction = + | UpdateLargestSegmentIdVolumeAction + | UpdateSegmentUpdateAction + | CreateSegmentUpdateAction + | DeleteSegmentUpdateAction + | UpdateSegmentGroupsUpdateAction + | AddUserBoundingBoxInVolumeTracingAction + | UpdateUserBoundingBoxInVolumeTracingAction + | DeleteUserBoundingBoxInVolumeTracingAction + | UpdateSegmentGroupsExpandedStateUpdateAction + | UpdateUserBoundingBoxVisibilityInVolumeTracingAction; + export type UpdateActionWithIsolationRequirement = | RevertToVersionUpdateAction | AddLayerToAnnotationUpdateAction; export type UpdateActionWithoutIsolationRequirement = | UpdateTreeUpdateAction | DeleteTreeUpdateAction - | MergeTreeUpdateAction + | LEGACY_MergeTreeUpdateAction | MoveTreeComponentUpdateAction | CreateNodeUpdateAction | UpdateNodeUpdateAction @@ -204,7 +234,7 @@ export function createTree(tree: Tree, actionTracingId: string) { value: { actionTracingId, id: tree.treeId, - updatedId: undefined, + updatedId: undefined, // was never really used, but is kept to keep the type information precise color: tree.color, name: tree.name, timestamp: tree.timestamp, @@ -283,7 +313,11 @@ export function updateTreeGroupVisibility( }, } as const; } -export function mergeTree(sourceTreeId: number, targetTreeId: number, actionTracingId: string) { +export function LEGACY_mergeTree( + sourceTreeId: number, + targetTreeId: number, + actionTracingId: string, +) { return { name: "mergeTree", value: { @@ -330,36 +364,40 @@ export type CreateActionNode = Omit & { position: Node["untransformedPosition"]; treeId: number; resolution: number; + actionTracingId: string; }; export type UpdateActionNode = Omit & { position: Node["untransformedPosition"]; treeId: number; + actionTracingId: string; }; export function createNode(treeId: number, node: Node, actionTracingId: string) { const { untransformedPosition, mag, ...restNode } = node; + const value: CreateActionNode = { + actionTracingId, + ...restNode, + position: untransformedPosition, + 
treeId, + resolution: mag, + }; return { name: "createNode", - value: { - actionTracingId, - ...restNode, - position: untransformedPosition, - treeId, - resolution: mag, - } as CreateActionNode, + value, } as const; } export function updateNode(treeId: number, node: Node, actionTracingId: string) { const { untransformedPosition, ...restNode } = node; + const value: UpdateActionNode = { + actionTracingId, + ...restNode, + position: untransformedPosition, + treeId, + }; return { name: "updateNode", - value: { - actionTracingId, - ...restNode, - position: untransformedPosition, - treeId, - } as UpdateActionNode, + value, } as const; } export function deleteNode(treeId: number, nodeId: number, actionTracingId: string) { @@ -563,24 +601,26 @@ export function deleteUserBoundingBoxInVolumeTracing( } function _updateUserBoundingBoxHelper( - actionName: "updateUserBoundingBoxInVolumeTracing" | "updateUserBoundingBoxInSkeletonTracing", boundingBoxId: number, updatedProps: PartialBoundingBoxWithoutVisibility, actionTracingId: string, -) { +): { + boundingBoxId: number; + actionTracingId: string; + boundingBox?: BoundingBoxObject; + name?: string; + color?: Vector3; +} { const { boundingBox, ...rest } = updatedProps; const updatedPropsForServer = boundingBox != null ? 
{ ...rest, boundingBox: Utils.computeBoundingBoxObjectFromBoundingBox(boundingBox) } - : updatedProps; + : (updatedProps as Omit); return { - name: actionName, - value: { - boundingBoxId, - actionTracingId, - ...updatedPropsForServer, - }, - } as const; + boundingBoxId, + actionTracingId, + ...updatedPropsForServer, + }; } export function updateUserBoundingBoxInVolumeTracing( @@ -588,12 +628,10 @@ export function updateUserBoundingBoxInVolumeTracing( updatedProps: PartialBoundingBoxWithoutVisibility, actionTracingId: string, ) { - return _updateUserBoundingBoxHelper( - "updateUserBoundingBoxInVolumeTracing", - boundingBoxId, - updatedProps, - actionTracingId, - ); + return { + name: "updateUserBoundingBoxInVolumeTracing", + value: _updateUserBoundingBoxHelper(boundingBoxId, updatedProps, actionTracingId), + } as const; } export function updateUserBoundingBoxInSkeletonTracing( @@ -601,12 +639,10 @@ export function updateUserBoundingBoxInSkeletonTracing( updatedProps: PartialBoundingBoxWithoutVisibility, actionTracingId: string, ) { - return _updateUserBoundingBoxHelper( - "updateUserBoundingBoxInSkeletonTracing", - boundingBoxId, - updatedProps, - actionTracingId, - ); + return { + name: "updateUserBoundingBoxInSkeletonTracing", + value: _updateUserBoundingBoxHelper(boundingBoxId, updatedProps, actionTracingId), + } as const; } export function updateUserBoundingBoxVisibilityInSkeletonTracing( @@ -714,12 +750,19 @@ export function updateBucket( base64Data: string, actionTracingId: string, ) { + if (base64Data == null) { + throw new Error("Invalid updateBucket action."); + } return { name: "updateBucket", value: { actionTracingId, ...bucketInfo, - base64Data, + // The frontend should always send base64Data. However, + // the return type of this function is also used for the + // update actions that can be retrieved from the server. + // In that case, the value will always be undefined. 
+ base64Data: base64Data as string | undefined, }, } as const; } diff --git a/frontend/javascripts/viewer/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/viewer/model/sagas/volumetracing_saga.tsx index f78e6a0947e..1eb7a78be93 100644 --- a/frontend/javascripts/viewer/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/viewer/model/sagas/volumetracing_saga.tsx @@ -57,13 +57,13 @@ import { import { markVolumeTransactionEnd } from "viewer/model/bucket_data_handling/bucket"; import type { Saga } from "viewer/model/sagas/effect-generators"; import { select, take } from "viewer/model/sagas/effect-generators"; -import listenToMinCut from "viewer/model/sagas/min_cut_saga"; -import listenToQuickSelect from "viewer/model/sagas/quick_select_saga"; import { requestBucketModificationInVolumeTracing, takeEveryUnlessBusy, takeWithBatchActionSupport, } from "viewer/model/sagas/saga_helpers"; +import listenToMinCut from "viewer/model/sagas/volume/min_cut_saga"; +import listenToQuickSelect from "viewer/model/sagas/volume/quick_select/quick_select_saga"; import { type UpdateActionWithoutIsolationRequirement, createSegmentVolumeAction, @@ -77,7 +77,7 @@ import { updateSegmentGroupsExpandedState, updateSegmentVisibilityVolumeAction, updateSegmentVolumeAction, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import type VolumeLayer from "viewer/model/volumetracing/volumelayer"; import { Model, api } from "viewer/singletons"; import type { SegmentMap, VolumeTracing } from "viewer/store"; diff --git a/frontend/javascripts/viewer/model_initialization.ts b/frontend/javascripts/viewer/model_initialization.ts index 0692c708e91..14bb62291c6 100644 --- a/frontend/javascripts/viewer/model_initialization.ts +++ b/frontend/javascripts/viewer/model_initialization.ts @@ -110,6 +110,7 @@ import DataLayer from "viewer/model/data_layer"; import type { DatasetConfiguration, DatasetLayerConfiguration, + StoreDataset, TraceOrViewCommand, 
UserConfiguration, } from "viewer/store"; @@ -118,7 +119,7 @@ import { getUserStateForTracing } from "./model/accessors/annotation_accessor"; import { doAllLayersHaveTheSameRotation } from "./model/accessors/dataset_layer_transformation_accessor"; import { setVersionNumberAction } from "./model/actions/save_actions"; import { - convertPointToVecInBoundingBox, + convertBoundingBoxProtoToObject, convertServerAdditionalAxesToFrontEnd, convertServerAnnotationToFrontendAnnotation, } from "./model/reducers/reducer_helpers"; @@ -206,16 +207,17 @@ export async function initialize( datasetId = initialCommandType.datasetId; } - const [dataset, initialUserSettings, serverTracings] = await fetchParallel( + const [apiDataset, initialUserSettings, serverTracings] = await fetchParallel( annotation, datasetId, version, ); - maybeFixDatasetNameInURL(dataset, initialCommandType); + maybeFixDatasetNameInURL(apiDataset, initialCommandType); const serverVolumeTracings = getServerVolumeTracings(serverTracings); const serverVolumeTracingIds = serverVolumeTracings.map((volumeTracing) => volumeTracing.id); - initializeDataset(initialFetch, dataset, serverTracings); + const dataset = preprocessDataset(apiDataset, serverTracings); + initializeDataset(initialFetch, dataset); const initialDatasetSettings = await getDatasetViewConfiguration( dataset, serverVolumeTracingIds, @@ -325,7 +327,7 @@ async function fetchEditableMappings( return Promise.all(promises); } -function validateSpecsForLayers(dataset: APIDataset, requiredBucketCapacity: number): any { +function validateSpecsForLayers(dataset: StoreDataset, requiredBucketCapacity: number): any { const layers = dataset.dataSource.dataLayers; const specs = getSupportedTextureSpecs(); validateMinimumRequirements(specs); @@ -447,11 +449,25 @@ function setInitialTool() { } } -function initializeDataset( - initialFetch: boolean, +export function preprocessDataset( dataset: APIDataset, serverTracings: Array, -): void { +): StoreDataset { + const 
mutableDataset = dataset as any as MutableAPIDataset; + const volumeTracings = getServerVolumeTracings(serverTracings); + + if (volumeTracings.length > 0) { + const newDataLayers = getMergedDataLayersFromDatasetAndVolumeTracings(dataset, volumeTracings); + mutableDataset.dataSource.dataLayers = newDataLayers; + validateVolumeLayers(volumeTracings, newDataLayers); + } + + (mutableDataset as StoreDataset).areLayersPreprocessed = true; + + return mutableDataset as StoreDataset; +} + +function initializeDataset(initialFetch: boolean, dataset: StoreDataset): void { let error; if (!dataset) { @@ -477,21 +493,13 @@ function initializeDataset( datasetName: dataset.name, datasetId: dataset.id, }); - const mutableDataset = dataset as any as MutableAPIDataset; - const volumeTracings = getServerVolumeTracings(serverTracings); - if (volumeTracings.length > 0) { - const newDataLayers = getMergedDataLayersFromDatasetAndVolumeTracings(dataset, volumeTracings); - mutableDataset.dataSource.dataLayers = newDataLayers; - validateVolumeLayers(volumeTracings, newDataLayers); - } - - Store.dispatch(setDatasetAction(mutableDataset as APIDataset)); - initializeAdditionalCoordinates(mutableDataset); + Store.dispatch(setDatasetAction(dataset)); + initializeAdditionalCoordinates(dataset); } -function initializeAdditionalCoordinates(mutableDataset: MutableAPIDataset) { - const unifiedAdditionalCoordinates = getUnifiedAdditionalCoordinates(mutableDataset); +function initializeAdditionalCoordinates(dataset: StoreDataset) { + const unifiedAdditionalCoordinates = getUnifiedAdditionalCoordinates(dataset); const initialAdditionalCoordinates = Utils.values(unifiedAdditionalCoordinates).map( ({ name, bounds }) => ({ name, @@ -612,7 +620,7 @@ function getMergedDataLayersFromDatasetAndVolumeTracings( elementClass: tracing.elementClass, category: "segmentation", largestSegmentId: tracing.largestSegmentId, - boundingBox: convertPointToVecInBoundingBox(tracing.boundingBox), + boundingBox: 
convertBoundingBoxProtoToObject(tracing.boundingBox), resolutions: tracingMags, mappings: fallbackLayer != null && "mappings" in fallbackLayer ? fallbackLayer.mappings : undefined, @@ -955,7 +963,7 @@ function enforcePricingRestrictionsOnUserConfiguration( function applyAnnotationSpecificViewConfiguration( annotation: APIAnnotation | null | undefined, - dataset: APIDataset, + dataset: StoreDataset, originalDatasetSettings: DatasetConfiguration, ): DatasetConfiguration { /** diff --git a/frontend/javascripts/viewer/store.ts b/frontend/javascripts/viewer/store.ts index 0edcc7ebdc8..db159e9fec3 100644 --- a/frontend/javascripts/viewer/store.ts +++ b/frontend/javascripts/viewer/store.ts @@ -33,7 +33,6 @@ import type { TracingType, } from "types/api_types"; import type { - BoundingBoxType, ContourMode, ControlMode, FillMode, @@ -52,7 +51,7 @@ import type { BLEND_MODES, ControlModeEnum } from "viewer/constants"; import type { TracingStats } from "viewer/model/accessors/annotation_accessor"; import type { AnnotationTool } from "viewer/model/accessors/tool_accessor"; import type { Action } from "viewer/model/actions/actions"; -import type { UpdateAction } from "viewer/model/sagas/update_actions"; +import type { UpdateAction } from "viewer/model/sagas/volume/update_actions"; import type { Toolkit } from "./model/accessors/tool_accessor"; import type { MutableTreeGroup, @@ -61,6 +60,7 @@ import type { TreeMap, } from "./model/types/tree_types"; +import type { BoundingBoxMinMaxType, BoundingBoxObject } from "types/bounding_box"; // Value imports import defaultState from "viewer/default_state"; import actionLoggerMiddleware from "viewer/model/helpers/action_logger_middleware"; @@ -82,27 +82,23 @@ import FlycamInfoCacheReducer from "./model/reducers/flycam_info_cache_reducer"; import OrganizationReducer from "./model/reducers/organization_reducer"; import type { StartAIJobModalState } from "./view/action-bar/starting_job_modals"; -export type BoundingBoxObject = { - readonly 
topLeft: Vector3; - readonly width: number; - readonly height: number; - readonly depth: number; -}; -export type UserBoundingBoxToServer = { +export type { BoundingBoxObject } from "types/bounding_box"; + +export type UserBoundingBoxForServer = { boundingBox: BoundingBoxObject; id: number; - name?: string; - color?: Vector3; + name: string; + color: Vector3; isVisible?: boolean; }; export type UserBoundingBoxWithoutIdMaybe = { - boundingBox?: BoundingBoxType; + boundingBox?: BoundingBoxMinMaxType; name?: string; color?: Vector3; isVisible?: boolean; }; export type UserBoundingBoxWithoutId = { - boundingBox: BoundingBoxType; + boundingBox: BoundingBoxMinMaxType; name: string; color: Vector3; isVisible: boolean; @@ -150,11 +146,12 @@ export type Annotation = { readonly othersMayEdit: boolean; readonly blockedByUser: APIUserCompact | null | undefined; readonly isLockedByOwner: boolean; + readonly isMutexAcquired: boolean; }; type TracingBase = { readonly createdTimestamp: number; readonly tracingId: string; - readonly boundingBox: BoundingBoxType | null | undefined; + readonly boundingBox: BoundingBoxMinMaxType | null | undefined; readonly userBoundingBoxes: Array; readonly additionalAxes: AdditionalAxis[]; }; @@ -560,11 +557,20 @@ export type LocalSegmentationData = { readonly hideUnregisteredSegments: boolean; }; +export type StoreDataset = APIDataset & { + // The backend serves an APIDataset object. The frontend + // adds/merges volume tracing objects into that dataset. The + // StoreDataset reflects this on a type level. For example, + // one cannot accidentally use the APIDataset during store + // initialization (which would be incorrect). 
+ areLayersPreprocessed: true; +}; + export type WebknossosState = { readonly datasetConfiguration: DatasetConfiguration; readonly userConfiguration: UserConfiguration; readonly temporaryConfiguration: TemporaryConfiguration; - readonly dataset: APIDataset; + readonly dataset: StoreDataset; readonly annotation: StoreAnnotation; readonly task: Task | null | undefined; readonly save: SaveState; @@ -583,7 +589,7 @@ export type WebknossosState = { }; const sagaMiddleware = createSagaMiddleware(); export type Reducer = (state: WebknossosState, action: Action) => WebknossosState; -const combinedReducers = reduceReducers( +export const combinedReducer = reduceReducers( SettingsReducer, DatasetReducer, SkeletonTracingReducer, @@ -598,16 +604,16 @@ const combinedReducers = reduceReducers( UiReducer, ConnectomeReducer, OrganizationReducer, -); +) as Reducer; const store = createStore( - enableBatching(combinedReducers), + enableBatching(combinedReducer as any), defaultState, applyMiddleware(actionLoggerMiddleware, overwriteActionMiddleware, sagaMiddleware as Middleware), ); export function startSaga(saga: Saga) { - sagaMiddleware.run(saga); + return sagaMiddleware.run(saga); } export type StoreType = typeof store; diff --git a/frontend/javascripts/viewer/view/action-bar/download_modal_view.tsx b/frontend/javascripts/viewer/view/action-bar/download_modal_view.tsx index 8d9b0dd18fb..b7b83d788dd 100644 --- a/frontend/javascripts/viewer/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/viewer/view/action-bar/download_modal_view.tsx @@ -42,7 +42,8 @@ import { type AdditionalAxis, type VoxelSize, } from "types/api_types"; -import type { BoundingBoxType, Vector3 } from "viewer/constants"; +import type { BoundingBoxMinMaxType } from "types/bounding_box"; +import type { Vector3 } from "viewer/constants"; import { getByteCountFromLayer, getDataLayers, @@ -123,7 +124,7 @@ function getExportLayerInfos( }; } -export function isBoundingBoxExportable(boundingBox: 
BoundingBoxType, mag: Vector3) { +export function isBoundingBoxExportable(boundingBox: BoundingBoxMinMaxType, mag: Vector3) { const shape = computeShapeFromBoundingBox(boundingBox); const volume = Math.ceil(shape[0] / mag[0]) * Math.ceil(shape[1] / mag[1]) * Math.ceil(shape[2] / mag[2]); @@ -164,7 +165,7 @@ export function isBoundingBoxExportable(boundingBox: BoundingBoxType, mag: Vecto function estimateFileSize( selectedLayer: APIDataLayer, mag: Vector3, - boundingBox: BoundingBoxType, + boundingBox: BoundingBoxMinMaxType, exportFormat: ExportFormat, ) { const shape = computeShapeFromBoundingBox(boundingBox); diff --git a/frontend/javascripts/viewer/view/context_menu.tsx b/frontend/javascripts/viewer/view/context_menu.tsx index 8647780c348..f2b2b385b66 100644 --- a/frontend/javascripts/viewer/view/context_menu.tsx +++ b/frontend/javascripts/viewer/view/context_menu.tsx @@ -96,7 +96,7 @@ import { cutAgglomerateFromNeighborsAction, minCutAgglomerateAction, minCutAgglomerateWithPositionAction, - proofreadMerge, + proofreadMergeAction, } from "viewer/model/actions/proofread_actions"; import { loadAdHocMeshAction, @@ -108,7 +108,6 @@ import { createTreeAction, deleteBranchpointByIdAction, deleteEdgeAction, - deleteNodeAsUserAction, expandParentGroupsOfTreeAction, mergeTreesAction, setActiveNodeAction, @@ -125,8 +124,8 @@ import { updateSegmentAction, } from "viewer/model/actions/volumetracing_actions"; import { extractPathAsNewTree } from "viewer/model/reducers/skeletontracing_reducer_helpers"; -import { isBoundingBoxUsableForMinCut } from "viewer/model/sagas/min_cut_saga"; import { getBoundingBoxInMag1 } from "viewer/model/sagas/volume/helpers"; +import { isBoundingBoxUsableForMinCut } from "viewer/model/sagas/volume/min_cut_saga"; import { voxelToVolumeInUnit } from "viewer/model/scaleinfo"; import { api } from "viewer/singletons"; import type { @@ -138,6 +137,7 @@ import type { VolumeTracing, } from "viewer/store"; +import { deleteNodeAsUserAction } from 
"viewer/model/actions/skeletontracing_actions_with_effects"; import { type MutableNode, type Tree, TreeMap } from "viewer/model/types/tree_types"; import Store from "viewer/store"; import { @@ -449,7 +449,9 @@ function getMeshItems( // Should not happen due to the disabled property. return; } - return Store.dispatch(proofreadMerge(null, maybeUnmappedSegmentId, clickedMeshId)); + return Store.dispatch( + proofreadMergeAction(null, maybeUnmappedSegmentId, clickedMeshId), + ); }, label: ( Merge with active segment @@ -1136,7 +1138,7 @@ function getNoNodeContextMenuOptions(props: NoNodeContextMenuProps): ItemType[] ? { key: "merge-agglomerate-skeleton", disabled: !isProofreadingActive, - onClick: () => Store.dispatch(proofreadMerge(globalPosition)), + onClick: () => Store.dispatch(proofreadMergeAction(globalPosition)), label: ( (null); const dispatch = useDispatch(); - const setChangeBoundingBoxBounds = (id: number, boundingBox: BoundingBoxType) => + const setChangeBoundingBoxBounds = (id: number, boundingBox: BoundingBoxMinMaxType) => dispatch( changeUserBoundingBoxAction(id, { boundingBox, diff --git a/frontend/javascripts/viewer/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/viewer/view/right-border-tabs/dataset_info_tab_view.tsx index 390efb86d3a..1ddbdbbdfbb 100644 --- a/frontend/javascripts/viewer/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/viewer/view/right-border-tabs/dataset_info_tab_view.tsx @@ -31,9 +31,11 @@ import type { StoreAnnotation, Task, WebknossosState } from "viewer/store"; import { getOrganization } from "admin/rest_api"; import FastTooltip from "components/fast_tooltip"; +import { useWkSelector } from "libs/react_hooks"; import { mayUserEditDataset, pluralize, safeNumberToStr } from "libs/utils"; import messages from "messages"; import type { EmptyObject } from "types/globals"; +import { WkDevFlags } from "viewer/api/wk_dev"; import { mayEditAnnotationProperties } from 
"viewer/model/accessors/annotation_accessor"; import { formatUserName } from "viewer/model/accessors/user_accessor"; import { getReadableNameForLayerName } from "viewer/model/accessors/volumetracing_accessor"; @@ -624,6 +626,7 @@ export class DatasetInfoTabView extends React.PureComponent { return (
+ {WkDevFlags.debugging.showCurrentVersionInInfoTab && } {this.getAnnotationName()} {this.getAnnotationDescription()} {this.getDatasetName()} @@ -653,6 +656,13 @@ export class DatasetInfoTabView extends React.PureComponent { } } +function DebugInfo() { + const versionOnClient = useWkSelector((state) => { + return state.annotation.version; + }); + return <>Version: {versionOnClient}; +} + const mapStateToProps = (state: WebknossosState): StateProps => ({ annotation: state.annotation, dataset: state.dataset, diff --git a/frontend/javascripts/viewer/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx b/frontend/javascripts/viewer/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx index d13cd59ed92..4d55b1b2515 100644 --- a/frontend/javascripts/viewer/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx +++ b/frontend/javascripts/viewer/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx @@ -43,7 +43,6 @@ import { deleteTreesAction, deselectActiveTreeAction, deselectActiveTreeGroupAction, - handleDeleteTreeByUser, selectNextTreeAction, setActiveTreeAction, setActiveTreeGroupAction, @@ -55,6 +54,7 @@ import { toggleAllTreesAction, toggleInactiveTreesAction, } from "viewer/model/actions/skeletontracing_actions"; +import { handleDeleteTreeByUser } from "viewer/model/actions/skeletontracing_actions_with_effects"; import { setDropzoneModalVisibilityAction } from "viewer/model/actions/ui_actions"; import { importVolumeTracingAction, diff --git a/frontend/javascripts/viewer/view/version_entry.tsx b/frontend/javascripts/viewer/view/version_entry.tsx index 231f110abf6..ad2f6b69298 100644 --- a/frontend/javascripts/viewer/view/version_entry.tsx +++ b/frontend/javascripts/viewer/view/version_entry.tsx @@ -40,10 +40,10 @@ import type { DeleteTreeUpdateAction, DeleteUserBoundingBoxInSkeletonTracingAction, DeleteUserBoundingBoxInVolumeTracingAction, + LEGACY_MergeTreeUpdateAction, LEGACY_UpdateUserBoundingBoxesInSkeletonTracingUpdateAction, 
LEGACY_UpdateUserBoundingBoxesInVolumeTracingUpdateAction, MergeAgglomerateUpdateAction, - MergeTreeUpdateAction, MoveTreeComponentUpdateAction, RevertToVersionUpdateAction, ServerUpdateAction, @@ -71,7 +71,7 @@ import type { UpdateUserBoundingBoxInVolumeTracingAction, UpdateUserBoundingBoxVisibilityInSkeletonTracingAction, UpdateUserBoundingBoxVisibilityInVolumeTracingAction, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import type { StoreAnnotation } from "viewer/store"; import { MISSING_GROUP_ID } from "viewer/view/right-border-tabs/trees_tab/tree_hierarchy_view_helpers"; type Description = { @@ -387,7 +387,7 @@ const descriptionFns: Record< icon: , }), // This should never be shown since currently this update action is never dispatched. - mergeTree: (action: AsServerAction): Description => ({ + mergeTree: (action: AsServerAction): Description => ({ description: `Merged the trees with id ${action.value.sourceId} and ${action.value.targetId}.`, icon: , }), diff --git a/frontend/javascripts/viewer/view/version_list.tsx b/frontend/javascripts/viewer/view/version_list.tsx index 21f319e8c30..3c022d4a133 100644 --- a/frontend/javascripts/viewer/view/version_list.tsx +++ b/frontend/javascripts/viewer/view/version_list.tsx @@ -26,7 +26,7 @@ import { type ServerUpdateAction, revertToVersion, serverCreateTracing, -} from "viewer/model/sagas/update_actions"; +} from "viewer/model/sagas/volume/update_actions"; import { Model } from "viewer/singletons"; import { api } from "viewer/singletons"; import type { StoreAnnotation } from "viewer/store"; @@ -80,7 +80,7 @@ export async function previewVersion(version?: number) { segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); for (const segmentationLayer of segmentationLayersToReload) { - segmentationLayer.cube.collectAllBuckets(); + segmentationLayer.cube.removeAllBuckets(); segmentationLayer.layerRenderingManager.refresh(); } } diff --git a/tsconfig.json 
b/tsconfig.json index ac62082774f..1991b0630e4 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -12,16 +12,8 @@ "target": "esnext", "skipLibCheck": true, //reconsider this later "allowSyntheticDefaultImports": true, - "baseUrl": "./frontend/javascripts", "paths": { - "viewer/*": ["./viewer/*"], - "libs/*": ["./libs/*"], - "dashboard/*": ["./dashboard/*"], - "components/*": ["./components/*"], - "admin/*": ["./admin/*"], - "types/*": ["./types/*"], - "test/*": ["./test/*"], - "features": ["./features.ts"] + "*": ["./frontend/javascripts/*"] } }, "include": [ diff --git a/unreleased_changes/8648.md b/unreleased_changes/8648.md new file mode 100644 index 00000000000..dc1d665f4dc --- /dev/null +++ b/unreleased_changes/8648.md @@ -0,0 +1,2 @@ +### Added +- When you are viewing an annotation and another user changes that annotation, these changes will be automatically shown. For some changes (e.g., when adding a new annotation layer), you will still need to reload the page, but most of the time WEBKNOSSOS will update the annotation automatically. 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 4991972d234..4f30ce81929 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -185,17 +185,18 @@ class EditableMappingService @Inject()( def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], segmentIdOpt: Option[Long], - mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = + magOpt: Option[Vec3Int])(implicit tc: TokenContext): Fox[Long] = segmentIdOpt match { case Some(segmentId) => Fox.successful(segmentId) - case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag) + case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, magOpt) } private def findSegmentIdAtPosition(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], - mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = + magOpt: Option[Vec3Int])(implicit tc: TokenContext): Fox[Long] = for { pos <- positionOpt.toFox ?~> "segment id or position is required in editable mapping action" + mag <- magOpt.toFox ?~> "segment id or mag is required in editable mapping action" voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(remoteFallbackLayer, pos, mag) voxelAsLongArray: Array[Long] <- bytesToLongs(voxelAsBytes, remoteFallbackLayer.elementClass) _ <- Fox.fromBool(voxelAsLongArray.length == 1) ?~> s"Expected one, got ${voxelAsLongArray.length} segment id values for voxel." 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 472de842ad5..61ad9841df0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -11,12 +11,12 @@ trait EditableMappingUpdateAction extends LayerUpdateAction { // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. -case class SplitAgglomerateUpdateAction(agglomerateId: Long, // Unused, we now look this up by position/segment +case class SplitAgglomerateUpdateAction(agglomerateId: Option[Long], // Unused, we now look this up by position/segment segmentPosition1: Option[Vec3Int], segmentPosition2: Option[Vec3Int], segmentId1: Option[Long], segmentId2: Option[Long], - mag: Vec3Int, + mag: Option[Vec3Int], actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[ObjectId] = None, @@ -36,13 +36,13 @@ object SplitAgglomerateUpdateAction { // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. 
-case class MergeAgglomerateUpdateAction(agglomerateId1: Long, // Unused, we now look this up by position/segment - agglomerateId2: Long, // Unused, we now look this up by position/segment +case class MergeAgglomerateUpdateAction(agglomerateId1: Option[Long], // Unused, we now look this up by position/segment + agglomerateId2: Option[Long], // Unused, we now look this up by position/segment segmentPosition1: Option[Vec3Int], segmentPosition2: Option[Vec3Int], segmentId1: Option[Long], segmentId2: Option[Long], - mag: Vec3Int, + mag: Option[Vec3Int], actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[ObjectId] = None,