From 9f89d383a944d02e16a88c2deb47e843df51b3d8 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 23 Jun 2025 14:55:04 +0200 Subject: [PATCH 01/10] Explore remote datasets as virtual datasets --- app/controllers/DatasetController.scala | 8 ++-- .../WKRemoteDataStoreController.scala | 30 ++++++++++++- app/models/dataset/DataStore.scala | 3 +- app/models/dataset/DatasetService.scala | 37 ++++++++++++++-- .../dataset/WKRemoteDataStoreClient.scala | 12 ------ .../explore/WKExploreRemoteLayerService.scala | 23 ++++++---- conf/webknossos.latest.routes | 1 + .../controllers/DataSourceController.scala | 29 ++++--------- .../dataformats/layers/N5DataLayers.scala | 40 +++++++++++++++++- .../layers/PrecomputedDataLayers.scala | 40 +++++++++++++++++- .../dataformats/layers/WKWDataLayers.scala | 40 +++++++++++++++++- .../dataformats/layers/Zarr3DataLayers.scala | 40 +++++++++++++++++- .../dataformats/layers/ZarrDataLayers.scala | 42 +++++++++++++++++-- .../models/datasource/DataLayer.scala | 2 + .../services/DSRemoteWebknossosClient.scala | 21 +++++++++- .../services/DataSourceService.scala | 5 +++ .../EditableMappingLayer.scala | 3 ++ .../tracings/volume/VolumeTracingLayer.scala | 2 + 18 files changed, 313 insertions(+), 65 deletions(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 5858ab5a6ea..9944a35769e 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -145,10 +145,10 @@ class DatasetController @Inject()(userService: UserService, _ <- Fox.fromBool(dataSource.dataLayers.nonEmpty) ?~> "dataset.explore.zeroLayers" folderIdOpt <- Fox.runOptional(request.body.folderPath)(folderPath => folderService.getOrCreateFromPathLiteral(folderPath, request.identity._organization)) ?~> "dataset.explore.autoAdd.getFolder.failed" - _ <- wkExploreRemoteLayerService.addRemoteDatasource(dataSource, - request.body.datasetName, - request.identity, - folderIdOpt) ?~> "dataset.explore.autoAdd.failed" + _ <- wkExploreRemoteLayerService.addRemoteDatasourceToDatabase(dataSource, + request.body.datasetName, + request.identity, + folderIdOpt) ?~> "dataset.explore.autoAdd.failed" } yield Ok } diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 37f0b70d2ca..121f7dc2b61 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.helpers.{LayerMagLinkInfo, MagLink import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} -import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataStoreStatus} +import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataSourceRegistrationInfo, DataStoreStatus} import com.scalableminds.webknossos.datastore.services.uploading.{ LinkedLayerIdentifier, ReserveAdditionalInformation, @@ -270,6 +270,34 @@ class WKRemoteDataStoreController @Inject()( } + // Register a datasource from the datastore as a dataset in the database. + // This is called when adding remote virtual datasets (that should only exist in the database) + // by the data store after exploration. 
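+  // Illustrative sketch of the exchange this action serves (an assumption for
+  // orientation, not asserted by the patch): field names follow the
+  // DataSourceRegistrationInfo case class introduced below; the dataSource body
+  // is whatever GenericDataSource's JSON format produces.
+  //   POST /api/datastores/:name/datasources/:organizationId/:directoryName?key=...&token=...
+  //   body:     { "dataSource": { ... }, "folderId": "<optional folder id>" }
+  //   response: 200 OK with the new dataset id as a plain string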
+ def registerDataSource(name: String, + key: String, + organizationId: String, + directoryName: String, + token: String): Action[DataSourceRegistrationInfo] = + Action.async(validateJson[DataSourceRegistrationInfo]) { implicit request => + dataStoreService.validateAccess(name, key) { dataStore => + for { + user <- bearerTokenService.userForToken(token) + organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + "organization.notFound", + organizationId) ~> NOT_FOUND + _ <- Fox.fromBool(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN + dataset <- datasetService.createVirtualDataset( + directoryName, + organizationId, + dataStore, + request.body.dataSource, + request.body.folderId, + user + ) + } yield Ok(dataset._id.toString) + } + } + def jobExportProperties(name: String, key: String, jobId: ObjectId): Action[AnyContent] = Action.async { implicit request => dataStoreService.validateAccess(name, key) { _ => diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 1672b253ce3..6c5ddc6e691 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -79,8 +79,7 @@ class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO, jobService: JobServ def validateAccess(name: String, key: String)(block: DataStore => Future[Result])( implicit m: MessagesProvider): Fox[Result] = - Fox.fromFuture((for { - dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) + Fox.fromFuture((for {dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) _ <- Fox.fromBool(key == dataStore.key) result <- Fox.fromFuture(block(dataStore)) } yield result).getOrElse(Forbidden(Json.obj("granted" -> false, "msg" -> Messages("dataStore.notFound"))))) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index af2082f8944..5a7de91f2e8 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -1,6 +1,6 @@ package models.dataset -import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -23,6 +23,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ AbstractDataLayer, AbstractSegmentationLayer, DataFormat, + DataSource, DataSourceId, GenericDataSource, DataLayerLike => DataLayer @@ -36,6 +37,7 @@ import models.team._ import models.user.{User, UserService} import net.liftweb.common.Box.tryo import net.liftweb.common.{Empty, EmptyBox, Full} +import play.api.i18n.Messages import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.WkConf @@ -97,6 +99,34 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } yield newDataset } + private def virtualRemoteDatasetStatus = "Virtual remote dataset" + + def createVirtualDataset(datasetName: String, + organizationId: String, + dataStore: DataStore, + dataSource: DataSource, + folderId: Option[String], + user: User): Fox[Dataset] = + for { + _ <- assertValidDatasetName(datasetName) + isDatasetNameAlreadyTaken <- datasetDAO.doesDatasetDirectoryExistInOrganization(datasetName, organizationId)( + GlobalAccessContext) + _ <- Fox.fromBool(!isDatasetNameAlreadyTaken) ?~> "dataset.name.alreadyTaken" + organization <- 
organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> "organization.notFound" + folderId <- ObjectId.fromString(folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid" + _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess" + newDatasetId = ObjectId.generate + abstractDataSource = dataSource.copy(dataLayers = dataSource.dataLayers.map(_.asAbstractLayer)) + dataset <- createDataset(dataStore, + newDatasetId, + datasetName, + abstractDataSource, + status = Some(virtualRemoteDatasetStatus)) + datasetId = dataset._id + _ <- datasetDAO.updateFolder(datasetId, folderId)(GlobalAccessContext) + _ <- addUploader(dataset, user._id)(GlobalAccessContext) + } yield dataset + def getAllUnfinishedDatasetUploadsOfUser(userId: ObjectId, organizationId: String)( implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] = datasetDAO.findAllCompactWithSearch( @@ -114,7 +144,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, datasetId: ObjectId, datasetName: String, dataSource: InboxDataSource, - publication: Option[ObjectId] = None + publication: Option[ObjectId] = None, + status: Option[String] = None ): Fox[Dataset] = { implicit val ctx: DBAccessContext = GlobalAccessContext val metadata = @@ -147,7 +178,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, name = datasetName, voxelSize = dataSource.voxelSizeOpt, sharingToken = None, - status = dataSource.statusOpt.getOrElse(""), + status = status.orElse(dataSource.statusOpt).getOrElse(""), logoUrl = None, metadata = metadata ) diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index b593d21cbd7..47c0b8c1a77 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -82,18 +82,6 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .silent .getWithJsonResponse[List[DirectoryStorageReport]] - def addDataSource(organizationId: String, - datasetName: String, - dataSource: GenericDataSource[DataLayer], - folderId: Option[ObjectId], - userToken: String): Fox[Unit] = - for { - _ <- rpc(s"${dataStore.url}/data/datasets/$organizationId/$datasetName") - .addQueryString("token" -> userToken) - .addQueryStringOptional("folderId", folderId.map(_.toString)) - .postJson(dataSource) - } yield () - def hasSegmentIndexFile(organizationId: String, datasetName: String, layerName: String)( implicit ec: ExecutionContext): Fox[Boolean] = { val cacheKey = (organizationId, datasetName, layerName) diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 79bb2493069..2072ad1f472 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -105,17 +105,22 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "dataVault.credential.insert.failed" } yield credentialId - def addRemoteDatasource(dataSource: GenericDataSource[DataLayer], - datasetName: String, - user: User, - folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] = + def addRemoteDatasourceToDatabase(dataSource: GenericDataSource[DataLayer], + datasetName: String, + user: User, + folderId: Option[ObjectId])(implicit ctx: DBAccessContext): 
Fox[Unit] = for { - organization <- organizationDAO.findOne(user._organization) dataStore <- dataStoreDAO.findOneWithUploadsAllowed + organizationId = user._organization _ <- datasetService.assertValidDatasetName(datasetName) - client = new WKRemoteDataStoreClient(dataStore, rpc) - userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) - _ <- client.addDataSource(organization._id, datasetName, dataSource, folderId, userToken) - } yield () + datasetId <- datasetService.createVirtualDataset( + dataSource.id.directoryName, + organizationId, + dataStore, + dataSource, + folderId.map(_.toString), + user + ) + } yield datasetId } diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index d3c94188a70..805d6418736 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -111,6 +111,7 @@ PUT /datastores/:name/datasources PUT /datastores/:name/datasources/paths controllers.WKRemoteDataStoreController.updatePaths(name: String, key: String) GET /datastores/:name/datasources/:organizationId/:directoryName/paths controllers.WKRemoteDataStoreController.getPaths(name: String, key: String, organizationId: String, directoryName: String) GET /datastores/:name/datasources/:datasetId controllers.WKRemoteDataStoreController.getDataSource(name: String, key: String, datasetId: ObjectId) +POST /datastores/:name/datasources/:organizationId/:directoryName controllers.WKRemoteDataStoreController.registerDataSource(name: String, key: String, organizationId: String, directoryName: String, token: String) PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 86df450b005..46de84d519d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -387,32 +387,17 @@ class DataSourceController @Inject()( } } - // Stores a remote dataset in the database. 
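+  // Sketch of the reworked flow (assuming the datastore route that previously
+  // backed the removed WKRemoteDataStoreClient.addDataSource):
+  //   frontend  -> POST <datastore>/data/datasets/:organizationId/:datasetName (body: DataSource)
+  //   datastore -> dsRemoteWebknossosClient.registerDataSource(dataSource, dataSourceId, folderId)
+  //   WK core   -> datasetService.createVirtualDataset(...), returning the new dataset id,
+  //                which this action wraps as { "newDatasetId": ... }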
+ // Called by the frontend after the user has set datasetName / FolderId of an explored dataSource + // Add this data source to the WK database def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { - reservedAdditionalInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload( - ReserveUploadInformation( - uploadId = "", // Set by core backend - name = datasetName, - organization = organizationId, - totalFileCount = 1, - filePaths = None, - totalFileSizeInBytes = None, - layersToLink = None, - initialTeams = List.empty, - folderId = folderId, - requireUniqueName = Some(false), - ) - ) ?~> "dataset.upload.validation.failed" - datasourceId = DataSourceId(reservedAdditionalInfo.directoryName, organizationId) - _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), expectExisting = false) - uploadedDatasetId <- dsRemoteWebknossosClient.reportUpload(datasourceId, - 0L, - needsConversion = false, - viaAddRoute = true) ?~> "reportUpload.failed" - } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetId)) + _ <- Fox.successful(()) + dataSourceId = DataSourceId(datasetName, organizationId) + dataSource = request.body.copy(id = dataSourceId) + datasetId <- dsRemoteWebknossosClient.registerDataSource(dataSource, dataSourceId, folderId) ?~> "dataset.add.failed" + } yield Ok(Json.obj("newDatasetId" -> datasetId)) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala index fa689fe813a..47760a0a9ca 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala @@ -37,7 +37,24 @@ case class N5DataLayer( override val numChannels: Option[Int] = Some(1), additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends N5Layer +) extends N5Layer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object N5DataLayer { implicit val jsonFormat: OFormat[N5DataLayer] = Json.format[N5DataLayer] @@ -57,7 +74,26 @@ case class N5SegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, ) extends SegmentationLayer - with N5Layer + with N5Layer { + override def asAbstractLayer: DataLayerLike = + AbstractSegmentationLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object N5SegmentationLayer { implicit val jsonFormat: OFormat[N5SegmentationLayer] = Json.format[N5SegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala index 1c6c0554ab1..bd2baf7ef98 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala @@ -37,7 +37,24 @@ case class PrecomputedDataLayer( override val numChannels: Option[Int] = Some(1), additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends PrecomputedLayer +) extends PrecomputedLayer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object PrecomputedDataLayer { implicit val jsonFormat: OFormat[PrecomputedDataLayer] = Json.format[PrecomputedDataLayer] @@ -57,7 +74,26 @@ case class PrecomputedSegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, ) extends SegmentationLayer - with PrecomputedLayer + with PrecomputedLayer { + override def asAbstractLayer: DataLayerLike = + AbstractSegmentationLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object PrecomputedSegmentationLayer { implicit val jsonFormat: OFormat[PrecomputedSegmentationLayer] = Json.format[PrecomputedSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index e892fd99524..96c3f7e1813 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -46,7 +46,24 @@ case class WKWDataLayer( coordinateTransformations: Option[List[CoordinateTransformation]] = None, additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends WKWLayer +) extends WKWLayer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + None, + None, + Some(dataFormat) + ) +} object WKWDataLayer { implicit val jsonFormat: OFormat[WKWDataLayer] = Json.format[WKWDataLayer] @@ -65,7 +82,26 @@ case class WKWSegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None ) extends SegmentationLayer - with WKWLayer + with WKWLayer { + def asAbstractLayer: AbstractSegmentationLayer = + AbstractSegmentationLayer( + name, + Category.segmentation, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + None, + None, + Some(dataFormat) + ) +} object WKWSegmentationLayer { implicit val 
jsonFormat: OFormat[WKWSegmentationLayer] = Json.format[WKWSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala index 4dff3a58649..7a5b3f7b61c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala @@ -38,7 +38,24 @@ case class Zarr3DataLayer( override val numChannels: Option[Int] = Some(1), additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None -) extends Zarr3Layer +) extends Zarr3Layer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object Zarr3DataLayer { implicit val jsonFormat: OFormat[Zarr3DataLayer] = Json.format[Zarr3DataLayer] @@ -58,7 +75,26 @@ case class Zarr3SegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None ) extends SegmentationLayer - with Zarr3Layer + with Zarr3Layer { + override def asAbstractLayer: DataLayerLike = + AbstractSegmentationLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object Zarr3SegmentationLayer { implicit val jsonFormat: OFormat[Zarr3SegmentationLayer] = Json.format[Zarr3SegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala index d8439efc38b..34244ca92bd 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.dataformats.layers import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.{DatasetArrayBucketProvider, MagLocator} -import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration +import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.{LayerViewConfiguration, empty} import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, _} import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import play.api.libs.json.{Json, OFormat} @@ -37,7 +37,24 @@ case class ZarrDataLayer( override val additionalAxes: Option[Seq[AdditionalAxis]], attachments: Option[DatasetLayerAttachments] = None, override val dataFormat: DataFormat.Value, -) extends ZarrLayer +) extends ZarrLayer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + 
coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object ZarrDataLayer { implicit val jsonFormat: OFormat[ZarrDataLayer] = Json.format[ZarrDataLayer] @@ -58,7 +75,26 @@ case class ZarrSegmentationLayer( attachments: Option[DatasetLayerAttachments] = None, override val dataFormat: DataFormat.Value, ) extends SegmentationLayer - with ZarrLayer + with ZarrLayer { + override def asAbstractLayer: AbstractSegmentationLayer = + AbstractSegmentationLayer( + name, + Category.segmentation, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object ZarrSegmentationLayer { implicit val jsonFormat: OFormat[ZarrSegmentationLayer] = Json.format[ZarrSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 4e216b4b5bb..a8277e10434 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -367,6 +367,8 @@ trait DataLayer extends DataLayerLike { case _ => this } } + + def asAbstractLayer: DataLayerLike } object DataLayer { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index d730d01677d..8cf537410d2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.{IntervalScheduler, LayerMagLinkInfo} import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.uploading.{ @@ -49,6 +49,12 @@ object MagPathInfo { implicit val jsonFormat: OFormat[MagPathInfo] = Json.format[MagPathInfo] } +case class DataSourceRegistrationInfo(dataSource: DataSource, folderId: Option[String]) + +object DataSourceRegistrationInfo { + implicit val jsonFormat: OFormat[DataSourceRegistrationInfo] = Json.format[DataSourceRegistrationInfo] +} + trait RemoteWebknossosClient { def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] } @@ -135,6 +141,19 @@ class DSRemoteWebknossosClient @Inject()( .postJsonWithJsonResponse[ReserveUploadInformation, ReserveAdditionalInformation](info) } yield reserveUploadInfo + def registerDataSource(dataSource: DataSource, dataSourceId: DataSourceId, folderId: Option[String])( 
+ implicit tc: TokenContext): Fox[String] = + for { + _ <- Fox.successful(()) + info = DataSourceRegistrationInfo(dataSource, folderId) + response <- rpc( + s"$webknossosUri/api/datastores/$dataStoreName/datasources/${dataSourceId.organizationId}/${dataSourceId.directoryName}") + .addQueryString("key" -> dataStoreKey) + .withTokenFromContext + .postJson[DataSourceRegistrationInfo](info) + datasetId = response.body + } yield datasetId + def deleteDataSource(id: DataSourceId): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset") .addQueryString("key" -> dataStoreKey) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index f7df1834983..74b9d55439d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -266,6 +266,11 @@ class DataSourceService @Inject()( } } + def dataSourceShouldBeStoredOnDisk(dataSource: DataSource): Boolean = + // If all mags have a "path" defined, data is not stored in the datasource (but at the location of the path) + // and we do not need to store it on disk. + !dataSource.dataLayers.forall(layer => layer.mags.forall(mag => mag.path.isDefined)) + def updateDataSource(dataSource: DataSource, expectExisting: Boolean): Fox[Unit] = { val organizationDir = dataBaseDir.resolve(dataSource.id.organizationId) val dataSourcePath = organizationDir.resolve(dataSource.id.directoryName) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index d822ab2f7e2..6f207727912 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ CoordinateTransformation, DataFormat, DataLayer, + DataLayerLike, DataSourceId, DatasetLayerAttachments, ElementClass, @@ -111,4 +112,6 @@ case class EditableMappingLayer(name: String, // set to tracing id def version: Long = tracing.version def tracingId: String = name + + override def asAbstractLayer: DataLayerLike = ??? } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 67a4c8528e6..5d3c8b543d2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -134,4 +134,6 @@ case class VolumeTracingLayer( lazy val expectedUncompressedBucketSize: Int = ElementClass.bytesPerElement(elementClass) * scala.math.pow(DataLayer.bucketLength, 3).intValue + + override def asAbstractLayer: DataLayerLike = ??? 
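+
+  // Note, as an assumption rather than something this patch states: tracing
+  // layers live only in the tracingstore and are never registered as datasets
+  // in the WK database, so asAbstractLayer is left unimplemented here; Scala's
+  // ??? throws a NotImplementedError if it is ever reached.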
} From 42101a95a052bf106d151f4611959de680a49a50 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 23 Jun 2025 16:59:25 +0200 Subject: [PATCH 02/10] Do not have virtual remote datasets deleted --- app/models/dataset/DataStore.scala | 3 ++- app/models/dataset/DatasetService.scala | 7 +++---- app/models/dataset/WKRemoteDataStoreClient.scala | 2 -- .../dataset/explore/WKExploreRemoteLayerService.scala | 3 +-- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 6c5ddc6e691..1672b253ce3 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -79,7 +79,8 @@ class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO, jobService: JobServ def validateAccess(name: String, key: String)(block: DataStore => Future[Result])( implicit m: MessagesProvider): Fox[Result] = - Fox.fromFuture((for {dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) + Fox.fromFuture((for { + dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) _ <- Fox.fromBool(key == dataStore.key) result <- Fox.fromFuture(block(dataStore)) } yield result).getOrElse(Forbidden(Json.obj("granted" -> false, "msg" -> Messages("dataStore.notFound"))))) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 5a7de91f2e8..26590d2a574 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -37,7 +37,6 @@ import models.team._ import models.user.{User, UserService} import net.liftweb.common.Box.tryo import net.liftweb.common.{Empty, EmptyBox, Full} -import play.api.i18n.Messages import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.WkConf @@ -65,7 +64,9 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, with LazyLogging { private val unreportedStatus = datasetDAO.unreportedStatus private val notYetUploadedStatus = "Not yet fully uploaded." 
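+  // Sketch of the intended effect (the helper name isInactive is hypothetical,
+  // not from this codebase): datasets whose status is in the inactiveStatusList
+  // updated below are skipped by the unreported-dataset cleanup, e.g.
+  //   private def isInactive(dataset: Dataset): Boolean =
+  //     inactiveStatusList.contains(dataset.status)
+  // so a virtual remote dataset survives even though no datastore reports it.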
- private val inactiveStatusList = List(unreportedStatus, notYetUploadedStatus, datasetDAO.deletedByUserStatus) + private val virtualRemoteDatasetStatus = "Virtual remote dataset" // Virtual datasets should not be deleted when not reported + private val inactiveStatusList = + List(unreportedStatus, notYetUploadedStatus, datasetDAO.deletedByUserStatus, virtualRemoteDatasetStatus) def assertValidDatasetName(name: String): Fox[Unit] = for { @@ -99,8 +100,6 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } yield newDataset } - private def virtualRemoteDatasetStatus = "Virtual remote dataset" - def createVirtualDataset(datasetName: String, organizationId: String, dataStore: DataStore, diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index 47c0b8c1a77..abecc28931e 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -9,14 +9,12 @@ import com.scalableminds.webknossos.datastore.explore.{ ExploreRemoteLayerParameters } import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, RawCuboidRequest} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, GenericDataSource} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.DirectoryStorageReport import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import play.api.libs.json.JsObject import play.utils.UriEncoding -import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 2072ad1f472..e4e186fd972 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -121,6 +121,5 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService folderId.map(_.toString), user ) - } yield datasetId - + } yield () } From 18dfe9837742d4481adcbd906dff1dcffa4fc003 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 25 Jun 2025 10:27:38 +0200 Subject: [PATCH 03/10] Put mag in db --- app/models/dataset/Dataset.scala | 4 ++-- app/models/dataset/DatasetService.scala | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index ece9e98eddb..54bdb2edea4 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -762,8 +762,8 @@ class DatasetMagsDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionConte layer.magsOpt match { case Some(mags) => mags.map(mag => { - q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, axisOrder, channelIndex, credentialId) - VALUES($datasetId, ${layer.name}, ${mag.mag}, ${mag.axisOrder + q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, path, axisOrder, channelIndex, credentialId) + VALUES($datasetId, ${layer.name}, ${mag.mag}, ${mag.path}, ${mag.axisOrder .map(Json.toJson(_))}, ${mag.channelIndex}, ${mag.credentialId}) """.asUpdate }) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 26590d2a574..496f94de154 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -51,7 +51,6 @@ class DatasetService @Inject()(organizationDAO: 
OrganizationDAO, datasetLastUsedTimesDAO: DatasetLastUsedTimesDAO, datasetDataLayerDAO: DatasetLayerDAO, datasetMagsDAO: DatasetMagsDAO, - datasetLayerAttachmentsDAO: DatasetLayerAttachmentsDAO, teamDAO: TeamDAO, folderDAO: FolderDAO, dataStoreService: DataStoreService, From 9c3cf7416493055494a991eaa6dececcf87dc7a9 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 25 Jun 2025 10:28:50 +0200 Subject: [PATCH 04/10] Add temporary front end for testing virtual datasets --- .../viewer/model/bucket_data_handling/wkstore_adapter.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts index d484c9b2e9b..c4a729e9b0e 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts @@ -97,15 +97,17 @@ export async function requestWithFallback( batch: Array, ): Promise | null | undefined>> { const state = Store.getState(); + const datasetId = state.dataset.id; const datasetDirectoryName = state.dataset.directoryName; const organization = state.dataset.owningOrganization; const dataStoreHost = state.dataset.dataStore.url; const tracingStoreHost = state.annotation.tracingStore.url; + // Prefer datasetId (id) if available, otherwise fall back to old method const getDataStoreUrl = (optLayerName?: string) => - `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${ - optLayerName || layerInfo.name - }`; + datasetId + ? `${dataStoreHost}/data/wkDatasets/${datasetId}/layers/${optLayerName || layerInfo.name}` + : `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${optLayerName || layerInfo.name}`; const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`; From 391227a2f057fe6b526c3f6fd22837276c49ea64 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 25 Jun 2025 12:05:28 +0200 Subject: [PATCH 05/10] Use mags for WKW datasets --- app/models/dataset/DatasetService.scala | 31 ++++++- .../dataformats/layers/WKWDataLayers.scala | 88 +++++++++++++++++-- .../dataformats/layers/ZarrDataLayers.scala | 2 +- 3 files changed, 108 insertions(+), 13 deletions(-) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 496f94de154..4ea101653fa 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -9,6 +9,8 @@ import com.scalableminds.webknossos.datastore.dataformats.layers.{ N5SegmentationLayer, PrecomputedDataLayer, PrecomputedSegmentationLayer, + WKWDataLayer, + WKWSegmentationLayer, Zarr3DataLayer, Zarr3SegmentationLayer, ZarrDataLayer, @@ -360,8 +362,18 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, case Some(df) => df match { case DataFormat.wkw => - throw new NotImplementedError( - "WKW data format not supported in this context, only datasets with MagLocators are supported") + WKWDataLayer( + name, + category, + boundingBox, + mags, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachmentsOpt + ) case DataFormat.neuroglancerPrecomputed => PrecomputedDataLayer( name, @@ -442,8 +454,19 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, case Some(df) => df match { case DataFormat.wkw => - throw new NotImplementedError( - "WKW data format not supported in this context, 
only datasets with MagLocators are supported") + WKWSegmentationLayer( + name, + boundingBox, + mags, + elementClass, + mappings, + largestSegmentId, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachmentsOpt + ) case DataFormat.neuroglancerPrecomputed => PrecomputedSegmentationLayer( name, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index 96c3f7e1813..991b96765e6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, Datas import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{Format, JsError, JsResult, JsSuccess, JsValue, Json, OFormat} import ucar.ma2.{Array => MultiArray} case class WKWResolution(resolution: Vec3Int, cubeLength: Int) @@ -26,12 +26,12 @@ trait WKWLayer extends DataLayer { def wkwResolutions: List[WKWResolution] - def mags: List[MagLocator] = wkwResolutions.map(wkwResolution => MagLocator(wkwResolution.resolution)) - def resolutions: List[Vec3Int] = wkwResolutions.map(_.resolution) + def defaultCubeSize = 32 + def lengthOfUnderlyingCubes(mag: Vec3Int): Int = - wkwResolutions.find(_.resolution == mag).map(_.cubeLength).getOrElse(0) + wkwResolutions.find(_.resolution == mag).map(_ => defaultCubeSize).getOrElse(0) } @@ -39,7 +39,7 @@ case class WKWDataLayer( name: String, category: Category.Value, boundingBox: BoundingBox, - wkwResolutions: List[WKWResolution], + mags: List[MagLocator], elementClass: ElementClass.Value, defaultViewConfiguration: Option[LayerViewConfiguration] = None, adminViewConfiguration: Option[LayerViewConfiguration] = None, @@ -63,16 +63,51 @@ case class WKWDataLayer( None, Some(dataFormat) ) + + override def wkwResolutions: List[WKWResolution] = mags.map(mag => WKWResolution(mag.mag, defaultCubeSize)) } object WKWDataLayer { - implicit val jsonFormat: OFormat[WKWDataLayer] = Json.format[WKWDataLayer] + implicit val jsonFormat: Format[WKWDataLayer] = new Format[WKWDataLayer] { + def reads(json: JsValue): JsResult[WKWDataLayer] = + for { + mag: List[MagLocator] <- (json \ "wkwResolutions").validate[List[WKWResolution]] match { + case JsSuccess(value, _) => JsSuccess(value.map(resolution => MagLocator(resolution.resolution))) + case JsError(_) => (json \ "mags").validate[List[MagLocator]] + } + name <- (json \ "name").validate[String] + category <- (json \ "category").validate[Category.Value] + boundingBox <- (json \ "boundingBox").validate[BoundingBox] + elementClass <- (json \ "elementClass").validate[ElementClass.Value] + defaultViewConfiguration <- (json \ "defaultViewConfiguration").validateOpt[LayerViewConfiguration] + adminViewConfiguration <- (json \ "adminViewConfiguration").validateOpt[LayerViewConfiguration] + coordinateTransformations <- (json \ "coordinateTransformations").validateOpt[List[CoordinateTransformation]] + additionalAxes <- (json \ 
"additionalAxes").validateOpt[Seq[AdditionalAxis]] + attachments <- (json \ "attachments").validateOpt[DatasetLayerAttachments] + } yield { + WKWDataLayer( + name, + category, + boundingBox, + mag, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments + ) + } + + def writes(layer: WKWDataLayer): JsValue = + Json.writes[WKWDataLayer].writes(layer) + } } case class WKWSegmentationLayer( name: String, boundingBox: BoundingBox, - wkwResolutions: List[WKWResolution], + mags: List[MagLocator], elementClass: ElementClass.Value, mappings: Option[Set[String]], largestSegmentId: Option[Long] = None, @@ -101,8 +136,45 @@ case class WKWSegmentationLayer( None, Some(dataFormat) ) + + override def wkwResolutions: List[WKWResolution] = mags.map(mag => WKWResolution(mag.mag, defaultCubeSize)) } object WKWSegmentationLayer { - implicit val jsonFormat: OFormat[WKWSegmentationLayer] = Json.format[WKWSegmentationLayer] + implicit val jsonFormat: Format[WKWSegmentationLayer] = new Format[WKWSegmentationLayer] { + def reads(json: JsValue): JsResult[WKWSegmentationLayer] = + for { + mag: List[MagLocator] <- (json \ "wkwResolutions").validate[List[WKWResolution]] match { + case JsSuccess(value, _) => JsSuccess(value.map(resolution => MagLocator(resolution.resolution))) + case JsError(_) => (json \ "mags").validate[List[MagLocator]] + } + name <- (json \ "name").validate[String] + boundingBox <- (json \ "boundingBox").validate[BoundingBox] + elementClass <- (json \ "elementClass").validate[ElementClass.Value] + largestSegmentId <- (json \ "largestSegmentId").validateOpt[Long] + mappings <- (json \ "mappings").validateOpt[Set[String]] + defaultViewConfiguration <- (json \ "defaultViewConfiguration").validateOpt[LayerViewConfiguration] + adminViewConfiguration <- (json \ "adminViewConfiguration").validateOpt[LayerViewConfiguration] + coordinateTransformations <- (json \ "coordinateTransformations").validateOpt[List[CoordinateTransformation]] + additionalAxes <- (json \ "additionalAxes").validateOpt[Seq[AdditionalAxis]] + attachments <- (json \ "attachments").validateOpt[DatasetLayerAttachments] + } yield { + WKWSegmentationLayer( + name, + boundingBox, + mag, + elementClass, + mappings, + largestSegmentId, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments + ) + } + + def writes(layer: WKWSegmentationLayer): JsValue = + Json.writes[WKWSegmentationLayer].writes(layer) + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala index 34244ca92bd..b79ef3d1c6d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.dataformats.layers import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.{DatasetArrayBucketProvider, MagLocator} -import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.{LayerViewConfiguration, empty} +import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import 
com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, _} import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import play.api.libs.json.{Json, OFormat} From 3b3b13ca98f2baf73a786e86a04e1124b41c9792 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 2 Jul 2025 12:40:55 +0200 Subject: [PATCH 06/10] Move zarr streaming stuff to service, todo: add controller with datasetid routes --- .../datastore/DataStoreModule.scala | 1 + .../controllers/LegacyController.scala | 5 + .../controllers/ZarrStreamingController.scala | 381 +++++++----------- .../services/ZarrStreamingService.scala | 228 +++++++++++ 4 files changed, 385 insertions(+), 230 deletions(-) create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala index a38bed41991..1d4374616c6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala @@ -42,5 +42,6 @@ class DataStoreModule extends AbstractModule { bind(classOf[RemoteSourceDescriptorService]).asEagerSingleton() bind(classOf[ChunkCacheService]).asEagerSingleton() bind(classOf[DatasetCache]).asEagerSingleton() + bind(classOf[ZarrStreamingService]).asEagerSingleton() } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala new file mode 100644 index 00000000000..09eb4ddef3c --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala @@ -0,0 +1,5 @@ +package com.scalableminds.webknossos.datastore.controllers + +class LegacyController { + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index f0a3007d00e..909e5c6da19 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -2,29 +2,20 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.layers.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} -import com.scalableminds.webknossos.datastore.dataformats.zarr.{Zarr3OutputHelper, ZarrCoordinatesParser} +import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper import com.scalableminds.webknossos.datastore.datareaders.zarr.{ - NgffGroupHeader, NgffMetadata, - NgffMetadataV0_5, - ZarrHeader + NgffMetadataV0_5 } -import com.scalableminds.webknossos.datastore.datareaders.zarr3.{Zarr3ArrayHeader, NgffZarr3GroupHeader} +import 
com.scalableminds.webknossos.datastore.datareaders.zarr3.NgffZarr3GroupHeader import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType, AnnotationSource} import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.models.requests.{ - Cuboid, - DataServiceDataRequest, - DataServiceRequestSettings -} -import com.scalableminds.webknossos.datastore.models.VoxelPosition import com.scalableminds.webknossos.datastore.services._ -import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.{JsValue, Json} +import play.api.i18n.Messages +import play.api.libs.json.Json import play.api.mvc._ import scala.concurrent.ExecutionContext @@ -36,6 +27,7 @@ class ZarrStreamingController @Inject()( binaryDataServiceHolder: BinaryDataServiceHolder, remoteWebknossosClient: DSRemoteWebknossosClient, remoteTracingstoreClient: DSRemoteTracingstoreClient, + zarrStreamingService: ZarrStreamingService )(implicit ec: ExecutionContext) extends Controller with Zarr3OutputHelper { @@ -58,12 +50,12 @@ class ZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.sortedMags) - } yield Ok(Json.toJson(omeNgffHeader)) + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND + header = zarrStreamingService.getHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) } } @@ -75,16 +67,12 @@ class ZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - omeNgffHeaderV0_5 = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayerName, - dataSource.scale, - dataLayer.sortedMags, - dataLayer.additionalAxes) - zarr3GroupHeader = NgffZarr3GroupHeader(3, "group", omeNgffHeaderV0_5) - } yield Ok(Json.toJson(zarr3GroupHeader)) + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND + header = zarrStreamingService.getGroupHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) } } @@ -152,9 +140,7 @@ class ZarrStreamingController @Inject()( dataSource <- dataSourceRepository .findUsable(DataSourceId(datasetDirectoryName, organizationId)) .toFox ~> NOT_FOUND - dataLayers = dataSource.dataLayers - zarrLayers = dataLayers.map(convertLayerToZarrLayer(_, zarrVersion)) - zarrSource = GenericDataSource[DataLayer](dataSource.id, zarrLayers, dataSource.scale) + zarrSource = zarrStreamingService.getZarrDataSource(dataSource, zarrVersion) } yield Ok(Json.toJson(zarrSource)) } } @@ -243,7 +229,12 @@ class ZarrStreamingController @Inject()( ): Action[AnyContent] = Action.async { implicit request => 
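      // Descriptive note on the shared shape after this refactor (an editor's
      // reading of the diff, not new behavior): validate access, resolve the
      // (dataSource, dataLayer) pair from the repository, then delegate the
      // Zarr-specific work to ZarrStreamingService.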
accessTokenService.validateAccessFromTokenContext( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - rawZarrCube(organizationId, datasetDirectoryName, dataLayerName, mag, coordinates) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + result <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) + } yield Ok(result) } } @@ -261,52 +252,16 @@ class ZarrStreamingController @Inject()( relevantTokenContext) .map(Ok(_)), orElse = annotationSource => - rawZarrCube(annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName, - mag, - coordinates) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( + annotationSource.organizationId, + annotationSource.datasetDirectoryName, + dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + zarrCube <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) + } yield Ok(zarrCube) ) } - private def rawZarrCube( - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - mag: String, - coordinates: String, - )(implicit m: MessagesProvider, tc: TokenContext): Fox[Result] = - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> SERVICE_UNAVAILABLE - reorderedAdditionalAxes = dataLayer.additionalAxes.map(reorderAdditionalAxes) - (x, y, z, additionalCoordinates) <- ZarrCoordinatesParser.parseNDimensionalDotCoordinates( - coordinates, - reorderedAdditionalAxes) ?~> "zarr.invalidChunkCoordinates" ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - cubeSize = DataLayer.bucketLength - request = DataServiceDataRequest( - Some(dataSource.id), - dataLayer, - Cuboid( - topLeft = VoxelPosition(x * cubeSize * magParsed.x, - y * cubeSize * magParsed.y, - z * cubeSize * magParsed.z, - magParsed), - width = cubeSize, - height = cubeSize, - depth = cubeSize - ), - DataServiceRequestSettings(halfByte = false, additionalCoordinates = additionalCoordinates) - ) - (data, notFoundIndices) <- binaryDataService.handleDataRequests(List(request)) - _ <- Fox.fromBool(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" ~> NOT_FOUND - } yield Ok(data) - def requestZArray( organizationId: String, datasetDirectoryName: String, @@ -315,24 +270,15 @@ class ZarrStreamingController @Inject()( ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - zArray(organizationId, datasetDirectoryName, dataLayerName, mag) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + zarrHeader <- zarrStreamingService.getZArray(dataLayer, mag) + } yield Ok(Json.toJson(zarrHeader)) } } - private def zArray(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)( - implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - 
"dataSource.notFound") ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - zarrHeader = ZarrHeader.fromLayer(dataLayer, magParsed) - } yield Ok(Json.toJson(zarrHeader)) - def requestZarrJsonForMag( organizationId: String, datasetDirectoryName: String, @@ -341,24 +287,15 @@ class ZarrStreamingController @Inject()( ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - zarrJsonForMag(organizationId, datasetDirectoryName, dataLayerName, mag) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ?~> NOT_FOUND + zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) + } yield Ok(Json.toJson(zarrJson)) } } - private def zarrJsonForMag(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)( - implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer, magParsed) - } yield Ok(Json.toJson(zarrHeader)) - def zArrayPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( @@ -369,7 +306,13 @@ class ZarrStreamingController @Inject()( .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => - zArray(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) + for { + (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(annotationSource.organizationId, + annotationSource.datasetDirectoryName, + dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND + zArray <- zarrStreamingService.getZArray(dataLayer, mag) + } yield Ok(Json.toJson(zArray)) ) } @@ -383,7 +326,13 @@ class ZarrStreamingController @Inject()( .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => - zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( + annotationSource.organizationId, + annotationSource.datasetDirectoryName, + dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) + } yield Ok(Json.toJson(zarrJson)) ) } @@ -403,175 +352,149 @@ class ZarrStreamingController @Inject()( } } yield result - def requestDataLayerMagDirectoryContents(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = - 
Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - dataLayerMagDirectoryContents(organizationId, datasetDirectoryName, dataLayerName, mag, zarrVersion) - } + def requestDataLayerDirectoryContents( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName), + contents + )).withHeaders() + } + } - private def dataLayerMagDirectoryContents(organizationId: String, - datasetDirectoryName: String, + def dataLayerDirectoryContentsPrivateLink(accessToken: String, dataLayerName: String, - mag: String, - zarrVersion: Int)(implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - additionalEntries = if (zarrVersion == 2) List(ZarrHeader.FILENAME_DOT_ZARRAY) - else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) - } yield - Ok( - views.html.datastoreZarrDatasourceDir( - "Datastore", - "%s/%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName, mag), - additionalEntries - )).withHeaders() - - def dataLayerMagDirectoryContentsPrivateLink(accessToken: String, - dataLayerName: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( accessToken, dataLayerName, ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerMagDirectoryContents(annotationLayer.tracingId, - mag, - annotationSource.tracingStoreUrl, - zarrVersion)(relevantTokenContext) + .getDataLayerDirectoryContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( + relevantTokenContext) .map( layers => Ok( views.html.datastoreZarrDatasourceDir( - "Combined Annotation Route", + "Tracingstore", s"${annotationLayer.tracingId}", layers )).withHeaders()), orElse = annotationSource => - dataLayerMagDirectoryContents(annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName, - mag, - zarrVersion) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( + annotationSource.organizationId, + annotationSource.datasetDirectoryName, + dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + content <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) + } yield Ok(Json.toJson(content)) ) } - def requestDataLayerDirectoryContents(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - 
zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => + def requestDataLayerMagDirectoryContents( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - dataLayerDirectoryContents(organizationId, datasetDirectoryName, dataLayerName, zarrVersion) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName, mag), + contents + )).withHeaders() } } - private def dataLayerDirectoryContents(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - zarrVersion: Int)(implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - mags = dataLayer.sortedMags - additionalFiles = if (zarrVersion == 2) - List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) - else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) - } yield - Ok( - views.html.datastoreZarrDatasourceDir( - "Datastore", - "%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName), - additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true)) - )).withHeaders() - - def dataLayerDirectoryContentsPrivateLink(accessToken: String, - dataLayerName: String, - zarrVersion: Int): Action[AnyContent] = + def dataLayerMagDirectoryContentsPrivateLink(accessToken: String, + dataLayerName: String, + mag: String, + zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( accessToken, dataLayerName, ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerDirectoryContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( - relevantTokenContext) + .getDataLayerMagDirectoryContents(annotationLayer.tracingId, + mag, + annotationSource.tracingStoreUrl, + zarrVersion)(relevantTokenContext) .map( layers => Ok( views.html.datastoreZarrDatasourceDir( - "Tracingstore", + "Combined Annotation Route", s"${annotationLayer.tracingId}", layers )).withHeaders()), orElse = annotationSource => - dataLayerDirectoryContents(annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName, - zarrVersion) + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( + annotationSource.organizationId, + annotationSource.datasetDirectoryName, + dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) + } yield Ok(Json.toJson(contents)) ) } - def requestDataSourceDirectoryContents(organizationId: String, - datasetDirectoryName: String, - zarrVersion: Int): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, 
organizationId))) { - for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND - layerNames = dataSource.dataLayers.map((dataLayer: DataLayer) => dataLayer.name) - additionalVersionDependantFiles = if (zarrVersion == 2) List(NgffGroupHeader.FILENAME_DOT_ZGROUP) - else List.empty - } yield - Ok( - views.html.datastoreZarrDatasourceDir( - "Datastore", - s"$organizationId/$datasetDirectoryName", - List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ additionalVersionDependantFiles ++ layerNames - )) - } + def requestDataSourceDirectoryContents( + organizationId: String, + datasetDirectoryName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + files <- zarrStreamingService.dataSourceDirectoryContents(dataSource, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + s"$organizationId/$datasetDirectoryName", + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ files + )) } + } def dataSourceDirectoryContentsPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) - dataSource <- dataSourceRepository - .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) - .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND - annotationLayerNames = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume).map(_.name) - dataSourceLayerNames = dataSource.dataLayers - .map((dataLayer: DataLayer) => dataLayer.name) - .filter(!annotationLayerNames.contains(_)) - layerNames = annotationLayerNames ++ dataSourceLayerNames - additionalEntries = if (zarrVersion == 2) - List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON, NgffGroupHeader.FILENAME_DOT_ZGROUP) - else - List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) + contents <- zarrStreamingService.dataSourceDirectoryContentsPrivateLink(accessToken, zarrVersion) } yield Ok( views.html.datastoreZarrDatasourceDir( "Combined datastore and tracingstore directory", s"$accessToken", - additionalEntries ++ layerNames + contents )) } @@ -581,12 +504,10 @@ class ZarrStreamingController @Inject()( Action.async { implicit request => accessTokenService.validateAccessFromTokenContextForSyncBlock( UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - Ok(zGroupJson) + Ok(zarrStreamingService.zGroupJson) } } - private def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) - def zGroupPrivateLink(accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( @@ -596,7 +517,7 @@ class ZarrStreamingController @Inject()( remoteTracingstoreClient .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(Ok(_)), - orElse = _ => Fox.successful(Ok(zGroupJson)) + orElse = _ => Fox.successful(Ok(zarrStreamingService.zGroupJson)) ) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala new file mode 100644 index 00000000000..222109cc60b --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala @@ -0,0 +1,228 @@ +package com.scalableminds.webknossos.datastore.services + +import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.dataformats.MagLocator +import com.scalableminds.webknossos.datastore.dataformats.layers.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} +import com.scalableminds.webknossos.datastore.dataformats.zarr.{Zarr3OutputHelper, ZarrCoordinatesParser} +import com.scalableminds.webknossos.datastore.datareaders.zarr._ +import com.scalableminds.webknossos.datastore.datareaders.zarr3.{NgffZarr3GroupHeader, Zarr3ArrayHeader} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.datastore.models.datasource._ +import com.scalableminds.webknossos.datastore.models.requests._ +import com.scalableminds.webknossos.datastore.models.VoxelPosition +import play.api.i18n.{Messages, MessagesProvider} +import play.api.libs.json.{JsValue, Json} + +import scala.concurrent.ExecutionContext +import com.scalableminds.webknossos.datastore.datareaders.AxisOrder + +class ZarrStreamingService @Inject()( + dataSourceRepository: DataSourceRepository, + accessTokenService: DataStoreAccessTokenService, + binaryDataServiceHolder: BinaryDataServiceHolder, + remoteWebknossosClient: DSRemoteWebknossosClient, + remoteTracingstoreClient: DSRemoteTracingstoreClient, +)(implicit ec: ExecutionContext) + extends Zarr3OutputHelper + with FoxImplicits { + + val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService + + def getHeader( + dataSource: DataSource, + dataLayer: DataLayer, + ): NgffMetadata = + NgffMetadata.fromNameVoxelSizeAndMags(dataLayer.name, dataSource.scale, dataLayer.sortedMags) + + def getGroupHeader( + dataSource: DataSource, + dataLayer: DataLayer + ): NgffZarr3GroupHeader = { + val omeNgffHeaderV0_5 = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayer.name, + dataSource.scale, + dataLayer.sortedMags, + dataLayer.additionalAxes) + + val zarr3GroupHeader = NgffZarr3GroupHeader(3, "group", omeNgffHeaderV0_5) + zarr3GroupHeader + } + + def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) + + def getZarrDataSource( + dataSource: DataSource, + zarrVersion: Int + ): DataSource = { + val dataLayers = dataSource.dataLayers + val zarrLayers = dataLayers.map(convertLayerToZarrLayer(_, zarrVersion)) + val zarrSource = GenericDataSource[DataLayer](dataSource.id, zarrLayers, dataSource.scale) + zarrSource + } + + private def convertLayerToZarrLayer(layer: DataLayer, zarrVersion: Int): ZarrLayer = { + val dataFormat = if (zarrVersion == 2) DataFormat.zarr else DataFormat.zarr3 + layer match { + case s: SegmentationLayer => + val rank = s.additionalAxes.map(_.length).getOrElse(0) + 4 + ZarrSegmentationLayer( + s.name, + s.boundingBox, + s.elementClass, + mags = s.sortedMags.map( + m => + MagLocator(m, + Some(s"./${s.name}/${m.toMagLiteral(allowScalar = true)}"), + None, + Some(AxisOrder.cAdditionalxyz(rank)), + None, + None)), + mappings = s.mappings, + largestSegmentId = s.largestSegmentId, + numChannels = Some(if (s.elementClass == 
ElementClass.uint24) 3 else 1), + defaultViewConfiguration = s.defaultViewConfiguration, + adminViewConfiguration = s.adminViewConfiguration, + coordinateTransformations = s.coordinateTransformations, + additionalAxes = s.additionalAxes.map(reorderAdditionalAxes), + dataFormat = dataFormat + ) + case d: DataLayer => + val rank = d.additionalAxes.map(_.length).getOrElse(0) + 4 + ZarrDataLayer( + d.name, + d.category, + d.boundingBox, + d.elementClass, + mags = d.sortedMags.map( + m => + MagLocator(m, + Some(s"./${d.name}/${m.toMagLiteral(allowScalar = true)}"), + None, + Some(AxisOrder.cAdditionalxyz(rank)), + None, + None)), + numChannels = Some(if (d.elementClass == ElementClass.uint24) 3 else 1), + defaultViewConfiguration = d.defaultViewConfiguration, + adminViewConfiguration = d.adminViewConfiguration, + coordinateTransformations = d.coordinateTransformations, + additionalAxes = d.additionalAxes.map(reorderAdditionalAxes), + dataFormat = dataFormat + ) + } + } + + def rawZarrCube( + dataSource: DataSource, + dataLayer: DataLayer, + mag: String, + coordinates: String + )(implicit m: MessagesProvider, tc: TokenContext): Fox[Array[Byte]] = + for { + _ <- Fox.successful(()) + reorderedAdditionalAxes = dataLayer.additionalAxes.map(reorderAdditionalAxes) + (x, y, z, additionalCoordinates) <- ZarrCoordinatesParser.parseNDimensionalDotCoordinates( + coordinates, + reorderedAdditionalAxes) ?~> "zarr.invalidChunkCoordinates" + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + cubeSize = DataLayer.bucketLength + request = DataServiceDataRequest( + Some(dataSource.id), + dataLayer, + Cuboid( + topLeft = VoxelPosition(x * cubeSize * magParsed.x, + y * cubeSize * magParsed.y, + z * cubeSize * magParsed.z, + magParsed), + width = cubeSize, + height = cubeSize, + depth = cubeSize + ), + DataServiceRequestSettings(halfByte = false, additionalCoordinates = additionalCoordinates) + ) + (data, notFoundIndices) <- binaryDataService.handleDataRequests(List(request)) + _ <- Fox.fromBool(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" + } yield data + + def getZArray( + dataLayer: DataLayer, + mag: String + )(implicit m: MessagesProvider): Fox[ZarrHeader] = + for { + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + } yield ZarrHeader.fromLayer(dataLayer, magParsed) + + def requestZarrJsonForMag( + dataSource: DataSource, + dataLayer: DataLayer, + mag: String + )(implicit m: MessagesProvider): Fox[Zarr3ArrayHeader] = + for { + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer, magParsed) + } yield zarrHeader + + def dataLayerDirectoryContents( + dataSource: DataSource, + dataLayer: DataLayer, + zarrVersion: Int + )(implicit m: MessagesProvider): Fox[List[String]] = + for { + _ <- Fox.successful(()) + mags = dataLayer.sortedMags + additionalFiles = if (zarrVersion == 2) + List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) + else 
List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) + } yield (additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true))) + + def dataLayerMagDirectoryContents( + dataSource: DataSource, + dataLayer: DataLayer, + mag: String, + zarrVersion: Int + )(implicit m: MessagesProvider): Fox[List[String]] = + for { + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + additionalEntries = if (zarrVersion == 2) List(ZarrHeader.FILENAME_DOT_ZARRAY) + else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) + } yield additionalEntries + + def dataSourceDirectoryContents( + dataSource: DataSource, + zarrVersion: Int + ): Fox[List[String]] = + for { + _ <- Fox.successful(()) + layerNames = dataSource.dataLayers.map((dataLayer: DataLayer) => dataLayer.name) + additionalVersionDependantFiles = if (zarrVersion == 2) List(NgffGroupHeader.FILENAME_DOT_ZGROUP) + else List.empty + } yield (layerNames ++ additionalVersionDependantFiles) + + def dataSourceDirectoryContentsPrivateLink(accessToken: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + for { + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) + dataSource <- dataSourceRepository // TODO: Use datasetcache here + .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) + .toFox + annotationLayerNames = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume).map(_.name) + dataSourceLayerNames = dataSource.dataLayers + .map((dataLayer: DataLayer) => dataLayer.name) + .filter(!annotationLayerNames.contains(_)) + layerNames = annotationLayerNames ++ dataSourceLayerNames + additionalEntries = if (zarrVersion == 2) + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON, NgffGroupHeader.FILENAME_DOT_ZGROUP) + else + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) + } yield additionalEntries ++ layerNames + +} From 3f81a85894c27f89719d69329d04553629fe7a11 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 7 Jul 2025 11:28:54 +0200 Subject: [PATCH 07/10] Move old zarr routes to LegacyController, update zarr routes to use id --- conf/webknossos.versioned.routes | 4 +- .../controllers/LegacyController.scala | 218 +++++++++++++++++- .../controllers/ZarrStreamingController.scala | 164 +++++-------- .../services/ZarrStreamingService.scala | 2 +- .../conf/datastore.latest.routes | 44 ++-- .../conf/datastore.versioned.routes | 28 +++ .../conf/tracingstore.versioned.routes | 1 + 7 files changed, 330 insertions(+), 131 deletions(-) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index 7faa44ce744..39a1e214766 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -3,7 +3,8 @@ # example: assume, the features route has changed, introducing v2. The older v1 needs to be provided in the legacyApiController # Note: keep this in sync with the reported version numbers in the com.scalableminds.util.mvc.ApiVersioning trait -# version log:updateDatasetV8 +# version log + # changed in v10: Datasets are accessed by their id, not their name and organization id. This leads to changes to the datastore routes. # changed in v9: Datasets are now identified by their id, not their name. The routes now need to pass a dataset id instead of a name and organization id tuple. 
# Requests to the TracingStore and DataStore need to address a dataset based on its directoryName and organization id. # changed in v8: Datasets' name was renamed to id and the displayName is now named name. @@ -14,6 +15,7 @@ # new in v3: annotation info and finish request now take timestamp # new in v2: annotation json contains visibility enum instead of booleans +-> /v10/ webknossos.latest.Routes -> /v9/ webknossos.latest.Routes # v8: support changes to v9 diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala index 09eb4ddef3c..1a14f8b10d3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala @@ -1,5 +1,221 @@ package com.scalableminds.webknossos.datastore.controllers -class LegacyController { +import com.google.inject.Inject +import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper +import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.services.{ + BinaryDataServiceHolder, + DSRemoteTracingstoreClient, + DSRemoteWebknossosClient, + DataSourceRepository, + DataStoreAccessTokenService, + UserAccessRequest, + ZarrStreamingService +} +import play.api.libs.json.Json +import play.api.mvc.{Action, AnyContent} + +import scala.concurrent.ExecutionContext + +class LegacyController @Inject()( + dataSourceRepository: DataSourceRepository, + accessTokenService: DataStoreAccessTokenService, + binaryDataServiceHolder: BinaryDataServiceHolder, + remoteWebknossosClient: DSRemoteWebknossosClient, + remoteTracingstoreClient: DSRemoteTracingstoreClient, + zarrStreamingService: ZarrStreamingService +)(implicit ec: ExecutionContext) + extends Controller + with Zarr3OutputHelper { + + /** + * Serve .zattrs file for a dataset + * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) + */ + def requestZAttrs( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String = "", + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND + header = zarrStreamingService.getHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) + } + } + + def requestZarrJson( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String = "", + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND + header = zarrStreamingService.getGroupHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) + } + } + + /** + * Zarr-specific datasource-properties.json file for a datasource.
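+    * e.g. served under the legacy route GET /v9/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json (see datastore.versioned.routes).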
+ * Note that the result here is not necessarily equal to the file used in the underlying storage. + */ + def requestDataSource( + organizationId: String, + datasetDirectoryName: String, + zarrVersion: Int, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + zarrSource = zarrStreamingService.getZarrDataSource(dataSource, zarrVersion) + } yield Ok(Json.toJson(zarrSource)) + } + } + + def requestRawZarrCube( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + coordinates: String, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + result <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) + } yield Ok(result) + } + } + + def requestZArray( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + zarrHeader <- zarrStreamingService.getZArray(dataLayer, mag) + } yield Ok(Json.toJson(zarrHeader)) + } + } + + def requestZarrJsonForMag( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) + } yield Ok(Json.toJson(zarrJson)) + } + } + + def requestDataLayerDirectoryContents( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName), + contents + )).withHeaders() + + } + } + + def requestDataLayerMagDirectoryContents( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(
UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName, mag), + contents + )).withHeaders() + } + } + + def requestDataSourceDirectoryContents( + organizationId: String, + datasetDirectoryName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + files <- zarrStreamingService.dataSourceDirectoryContents(dataSource, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + s"$organizationId/$datasetDirectoryName", + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ files + )) + } + } + + def requestZGroup(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String = ""): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + Ok(zarrStreamingService.zGroupJson) + } + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 909e5c6da19..7b2d6024c26 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -2,14 +2,12 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.layers.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper -import com.scalableminds.webknossos.datastore.datareaders.zarr.{ - NgffMetadata, - NgffMetadataV0_5 -} +import com.scalableminds.webknossos.datastore.datareaders.zarr.{NgffMetadata, NgffMetadataV0_5} import com.scalableminds.webknossos.datastore.datareaders.zarr3.NgffZarr3GroupHeader import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType, AnnotationSource} import com.scalableminds.webknossos.datastore.models.datasource._ @@ -22,7 +20,7 @@ import scala.concurrent.ExecutionContext import com.scalableminds.webknossos.datastore.datareaders.AxisOrder class ZarrStreamingController @Inject()( - dataSourceRepository: DataSourceRepository, + datasetCache: DatasetCache, accessTokenService: DataStoreAccessTokenService, binaryDataServiceHolder: BinaryDataServiceHolder, remoteWebknossosClient: DSRemoteWebknossosClient, @@ -43,16 +41,13 @@ 
class ZarrStreamingController @Inject()( * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) */ def requestZAttrs( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND header = zarrStreamingService.getHeader(dataSource, dataLayer) } yield Ok(Json.toJson(header)) @@ -60,16 +55,13 @@ class ZarrStreamingController @Inject()( } def requestZarrJson( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND header = zarrStreamingService.getGroupHeader(dataSource, dataLayer) } yield Ok(Json.toJson(header)) @@ -88,10 +80,8 @@ class ZarrStreamingController @Inject()( }, orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND dataSourceOmeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.sortedMags) @@ -112,10 +102,8 @@ class ZarrStreamingController @Inject()( }, orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND dataSourceOmeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.sortedMags, @@ -130,16 +118,13 @@ class ZarrStreamingController @Inject()( * Note that the result here is not necessarily equal to the file used in the underlying storage. 
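+   * e.g. reachable via GET /zarr/:datasetId/datasource-properties.json (see datastore.latest.routes).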
*/ def requestDataSource( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND zarrSource = zarrStreamingService.getZarrDataSource(dataSource, zarrVersion) } yield Ok(Json.toJson(zarrSource)) } @@ -203,9 +188,7 @@ class ZarrStreamingController @Inject()( relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) else tokenContextForRequest volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) - dataSource <- dataSourceRepository - .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) - .toFox ~> NOT_FOUND + dataSource <- datasetCache.getById(annotationSource.datasetId) ?~> Messages("dataSource.notFound") ~> NOT_FOUND dataSourceLayers = dataSource.dataLayers .filter(dL => !volumeAnnotationLayers.exists(_.name == dL.name)) .map(convertLayerToZarrLayer(_, zarrVersion)) @@ -221,18 +204,15 @@ class ZarrStreamingController @Inject()( } def requestRawZarrCube( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND result <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) } yield Ok(result) } @@ -253,44 +233,36 @@ class ZarrStreamingController @Inject()( .map(Ok(_)), orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND zarrCube <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) } yield Ok(zarrCube) ) } def requestZArray( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- 
dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + (_, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND zarrHeader <- zarrStreamingService.getZArray(dataLayer, mag) } yield Ok(Json.toJson(zarrHeader)) } } def requestZarrJsonForMag( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) } yield Ok(Json.toJson(zarrJson)) } @@ -307,9 +279,7 @@ class ZarrStreamingController @Inject()( .map(z => Ok(Json.toJson(z))), orElse = annotationSource => for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages( + (_, dataLayer) <- datasetCache.getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND zArray <- zarrStreamingService.getZArray(dataLayer, mag) } yield Ok(Json.toJson(zArray)) @@ -327,10 +297,8 @@ class ZarrStreamingController @Inject()( .map(z => Ok(Json.toJson(z))), orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) } yield Ok(Json.toJson(zarrJson)) ) @@ -353,23 +321,20 @@ class ZarrStreamingController @Inject()( } yield result def requestDataLayerDirectoryContents( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String, zarrVersion: Int ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND contents <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) } yield Ok( views.html.datastoreZarrDatasourceDir( "Datastore", - "%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName), + "%s/%s".format(datasetId, dataLayerName), contents )).withHeaders() @@ -397,34 +362,29 @@ 
class ZarrStreamingController @Inject()( )).withHeaders()), orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND content <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) } yield Ok(Json.toJson(content)) ) } def requestDataLayerMagDirectoryContents( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) } yield Ok( views.html.datastoreZarrDatasourceDir( "Datastore", - "%s/%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName, mag), + "%s/%s/%s".format(datasetId, dataLayerName, mag), contents )).withHeaders() } } @@ -454,32 +414,27 @@ class ZarrStreamingController @Inject()( )).withHeaders()), orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) } yield Ok(Json.toJson(contents)) ) } def requestDataSourceDirectoryContents( - organizationId: String, - datasetDirectoryName: String, + datasetId: String, zarrVersion: Int ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND files <- zarrStreamingService.dataSourceDirectoryContents(dataSource, zarrVersion) } yield Ok( views.html.datastoreZarrDatasourceDir( "Datastore", - s"$organizationId/$datasetDirectoryName", + datasetId, List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ files )) } } @@ -498,12 +453,9 @@ class ZarrStreamingController @Inject()( )) } - def requestZGroup(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String = ""): Action[AnyContent] = + def requestZGroup(datasetId: String, dataLayerName: String =
""): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContextForSyncBlock(UserAccessRequest.readDataset(datasetId)) { Ok(zarrStreamingService.zGroupJson) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala index 222109cc60b..63786bc688b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala @@ -173,7 +173,7 @@ class ZarrStreamingService @Inject()( dataSource: DataSource, dataLayer: DataLayer, zarrVersion: Int - )(implicit m: MessagesProvider): Fox[List[String]] = + ): Fox[List[String]] = for { _ <- Fox.successful(()) mags = dataLayer.sortedMags diff --git a/webknossos-datastore/conf/datastore.latest.routes b/webknossos-datastore/conf/datastore.latest.routes index b5bef4bf108..a19c02debe1 100644 --- a/webknossos-datastore/conf/datastore.latest.routes +++ b/webknossos-datastore/conf/datastore.latest.routes @@ -22,18 +22,18 @@ GET /wkDatasets/:datasetId/layers/:dataLayerName/histogram GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) # Zarr2 compatible routes -GET /zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName="") -GET /zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET 
/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
-GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
-GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
-GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
-GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+GET /zarr/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: String, dataLayerName="")
+GET /zarr/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(datasetId: String, dataLayerName: String)
+GET /zarr/:datasetId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: String, dataLayerName: String)
+GET /zarr/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(datasetId: String, dataLayerName: String, mag: String)
+GET /zarr/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: String, dataLayerName: String, mag: String, coordinates: String)

 GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2)
 GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2)
@@ -49,16 +49,16 @@ GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray
 GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String)

 # Zarr3 compatible routes
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
-GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+GET /zarr3_experimental/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(datasetId: String, dataLayerName: String)
+GET /zarr3_experimental/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(datasetId: String, dataLayerName: String, mag: String)
+GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: String, dataLayerName: String, mag: String, coordinates: String)

 GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3)
 GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3)
diff --git a/webknossos-datastore/conf/datastore.versioned.routes b/webknossos-datastore/conf/datastore.versioned.routes
index 71101ccdbe3..bc0e5987208 100644
--- a/webknossos-datastore/conf/datastore.versioned.routes
+++ b/webknossos-datastore/conf/datastore.versioned.routes
@@ -1,6 +1,34 @@
 # Note: keep this in sync with the reported version numbers in the com.scalableminds.util.mvc.ApiVersioning trait
+-> /v10/ datastore.latest.Routes
 -> /v9/ datastore.latest.Routes
+
+# Zarr2 compatible routes
+GET /v9/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName="")
+GET /v9/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZAttrs(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZArray(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
+# Zarr3 compatible routes
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonForMag(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
 -> /v8/ datastore.latest.Routes
 -> /v7/ datastore.latest.Routes
 -> /v6/ datastore.latest.Routes
diff --git a/webknossos-tracingstore/conf/tracingstore.versioned.routes b/webknossos-tracingstore/conf/tracingstore.versioned.routes
index 1faf9b948b1..aad61de028d 100644
--- a/webknossos-tracingstore/conf/tracingstore.versioned.routes
+++ b/webknossos-tracingstore/conf/tracingstore.versioned.routes
@@ -1,5 +1,6 @@
 # Note: keep this in sync with the reported version numbers in the com.scalableminds.util.mvc.ApiVersioning trait
+-> /v10/ tracingstore.latest.Routes
 -> /v9/ tracingstore.latest.Routes
 -> /v8/ tracingstore.latest.Routes
 -> /v7/ tracingstore.latest.Routes

From ac0f66d4dd0c0fc7b270d52b96cd7017bcca0dea Mon Sep 17 00:00:00 2001
From: frcroth
Date: Mon, 7 Jul 2025 12:13:11 +0200
Subject: [PATCH 08/10] Use datasetId in BinaryDataController

---
 .../controllers/BinaryDataController.scala    | 141 +++-----
 .../controllers/DataSourceController.scala    |   5 +-
 .../controllers/LegacyController.scala        | 329 +++++++++++++++++-
 .../controllers/WKDatasetController.scala     | 199 -----------
 .../controllers/ZarrStreamingController.scala |  49 ++-
 .../services/AccessTokenService.scala         |   3 +
 .../datastore/services/DatasetCache.scala     |   2 +-
 .../conf/datastore.latest.routes              |  71 ++--
 .../conf/datastore.versioned.routes           |  14 +
 9 files changed, 452 insertions(+), 361 deletions(-)
 delete mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala
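
The pattern this patch applies throughout: handlers no longer address data by (organizationId, datasetDirectoryName) but resolve the dataset by its id through the datastore's DatasetCache and then select the layer by name. A minimal, self-contained sketch of that lookup shape, with illustrative stub types standing in for the production DatasetCache, DataSource, and Fox/Play machinery (all names here are stand-ins, not the real classes):

import scala.collection.concurrent.TrieMap

final case class LayerStub(name: String)
final case class DataSourceStub(id: String, layers: List[LayerStub]) {
  def getDataLayer(name: String): Option[LayerStub] = layers.find(_.name == name)
}

// Caches one datasource per dataset id; entries are dropped on invalidation.
final class DatasetCacheStub(load: String => DataSourceStub) {
  private val cache = TrieMap.empty[String, DataSourceStub]

  def getById(datasetId: String): DataSourceStub =
    cache.getOrElseUpdate(datasetId, load(datasetId))

  // Resolve the dataset first, then pick the named layer from it.
  def getWithLayer(datasetId: String, layerName: String): Option[(DataSourceStub, LayerStub)] = {
    val dataSource = getById(datasetId)
    dataSource.getDataLayer(layerName).map(layer => (dataSource, layer))
  }

  def invalidateCache(datasetId: String): Unit = cache.remove(datasetId)
}

object DatasetCacheDemo extends App {
  val cache = new DatasetCacheStub(id => DataSourceStub(id, List(LayerStub("color"))))
  println(cache.getWithLayer("someDatasetId", "color")) // Some((DataSourceStub(...), LayerStub(color)))
  cache.invalidateCache("someDatasetId") // e.g. after the datasource changed server-side
}

In the controllers below, the same two-step lookup is wrapped in Fox for-comprehensions, guarded by accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)), and invalidated via DataSourceController.invalidateCache when a dataset changes.
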
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index e1fe95fdd5f..11c2219d99b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -4,6 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.image.{Color, JPEGWriter} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -31,7 +32,7 @@ import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext class BinaryDataController @Inject()( - dataSourceRepository: DataSourceRepository, + datasetCache: DatasetCache, config: DataStoreConfig, accessTokenService: DataStoreAccessTokenService, binaryDataServiceHolder: BinaryDataServiceHolder, @@ -50,38 +51,32 @@ class BinaryDataController @Inject()( (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def requestViaWebknossos( - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String - ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - logTime(slackNotificationService.noticeSlowRequest) { - val t = Instant.now - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - duration = Instant.since(t) - _ = if (duration > (10 seconds)) - logger.info( - s"Complete data request for $organizationId/$datasetDirectoryName/$dataLayerName took ${formatDuration(duration)}." - + request.body.headOption - .map(firstReq => s" First of ${request.body.size} requests was $firstReq") - .getOrElse("")) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + def requestViaWebknossos(datasetId: ObjectId, dataLayerName: String): Action[List[WebknossosDataRequest]] = + Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + logTime(slackNotificationService.noticeSlowRequest) { + val t = Instant.now + for { + dataSource <- datasetCache.getById(datasetId) + dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ?~> "Data layer not found" ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + duration = Instant.since(t) + _ = if (duration > (10 seconds)) + logger.info( + s"Complete data request for $datasetId/$dataLayerName took ${formatDuration(duration)}." 
+ + request.body.headOption + .map(firstReq => s" First of ${request.body.size} requests was $firstReq") + .getOrElse("")) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } } } - } /** * Handles requests for raw binary data via HTTP GET. */ def requestRawCuboid( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, // Mag1 coordinates of the top-left corner of the bounding box x: Int, @@ -97,12 +92,9 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" dataRequest = DataRequest( VoxelPosition(x, y, z, magParsed), @@ -116,39 +108,30 @@ class BinaryDataController @Inject()( } } - def requestRawCuboidPost( - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String - ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + def requestRawCuboidPost(datasetId: ObjectId, dataLayerName: String): Action[RawCuboidRequest] = + Action.async(validateJson[RawCuboidRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } } - } /** * Handles a request for raw binary data via a HTTP GET. Used by knossos. 
*/ - def requestViaKnossos(organizationId: String, - datasetDirectoryName: String, + def requestViaKnossos(datasetId: ObjectId, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND dataRequest = DataRequest( VoxelPosition(x * cubeSize * mag, y * cubeSize * mag, z * cubeSize * mag, Vec3Int(mag, mag, mag)), cubeSize, @@ -160,8 +143,7 @@ class BinaryDataController @Inject()( } } - def thumbnailJpeg(organizationId: String, - datasetDirectoryName: String, + def thumbnailJpeg(datasetId: ObjectId, dataLayerName: String, x: Int, y: Int, @@ -174,12 +156,9 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" dataRequest = DataRequest( @@ -216,17 +195,14 @@ class BinaryDataController @Inject()( } def mappingJson( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound") mappingRequest = DataServiceMappingRequest(Some(dataSource.id), segmentationLayer, mappingName) result <- mappingService.handleMappingRequest(mappingRequest) @@ -237,16 +213,12 @@ class BinaryDataController @Inject()( /** * Handles ad-hoc mesh requests. 
*/ - def requestAdHocMesh(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(datasetId: ObjectId, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" adHocMeshRequest = AdHocMeshRequest( Some(dataSource.id), @@ -279,27 +251,22 @@ class BinaryDataController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + def findData(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) } } - def histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + def histogram(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", dataLayerName) @@ -314,7 +281,7 @@ class BinaryDataController @Inject()( )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { val requests = dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings)) - binaryDataService.handleDataRequests(requests) + binaryDataService.handleDataRequests(requests) // TODO: Use new dataset id here as well (?) 
} } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 3a825dfe120..e3551d6fa4a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong @@ -742,8 +743,8 @@ class DataSourceController @Inject()( } } - def invalidateCache(datasetId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId)) { + def invalidateCache(datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId.toString)) { datasetCache.invalidateCache(datasetId) Future.successful(Ok) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala index 1a14f8b10d3..11a85561670 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala @@ -1,21 +1,58 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.image.{Color, JPEGWriter} +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Box.tryo +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper -import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders +import com.scalableminds.webknossos.datastore.image.{ImageCreator, ImageCreatorParameters} +import com.scalableminds.webknossos.datastore.models.DataRequestCollection._ +import com.scalableminds.webknossos.datastore.models.{ + DataRequest, + RawCuboidRequest, + VoxelPosition, + WebknossosAdHocMeshRequest, + WebknossosDataRequest +} +import com.scalableminds.webknossos.datastore.models.datasource.{ + Category, + DataLayer, + DataSourceId, + GenericDataSource, + SegmentationLayer +} +import com.scalableminds.webknossos.datastore.models.requests.{ + DataServiceDataRequest, + DataServiceMappingRequest, + DataServiceRequestSettings +} +import com.scalableminds.webknossos.datastore.services.mesh.{AdHocMeshRequest, AdHocMeshService, AdHocMeshServiceHolder} import com.scalableminds.webknossos.datastore.services.{ + BinaryDataService, BinaryDataServiceHolder, DSRemoteTracingstoreClient, DSRemoteWebknossosClient, DataSourceRepository, DataStoreAccessTokenService, + FindDataService, + MappingService, 
UserAccessRequest, ZarrStreamingService } +import com.scalableminds.webknossos.datastore.slacknotification.DSSlackNotificationService +import play.api.i18n.Messages import play.api.libs.json.Json -import play.api.mvc.{Action, AnyContent} +import play.api.mvc.{Action, AnyContent, PlayBodyParsers, RawBuffer} +import java.io.ByteArrayOutputStream +import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt class LegacyController @Inject()( dataSourceRepository: DataSourceRepository, @@ -23,10 +60,294 @@ class LegacyController @Inject()( binaryDataServiceHolder: BinaryDataServiceHolder, remoteWebknossosClient: DSRemoteWebknossosClient, remoteTracingstoreClient: DSRemoteTracingstoreClient, + config: DataStoreConfig, + mappingService: MappingService, + slackNotificationService: DSSlackNotificationService, + adHocMeshServiceHolder: AdHocMeshServiceHolder, + findDataService: FindDataService, zarrStreamingService: ZarrStreamingService -)(implicit ec: ExecutionContext) +)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller - with Zarr3OutputHelper { + with Zarr3OutputHelper + with MissingBucketHeaders { + + // BINARY DATA ROUTES + + override def allowRemoteOrigin: Boolean = true + + val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService + adHocMeshServiceHolder.dataStoreAdHocMeshConfig = + (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) + val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService + + def requestViaWebknossos( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String + ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + logTime(slackNotificationService.noticeSlowRequest) { + val t = Instant.now + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + duration = Instant.since(t) + _ = if (duration > (10 seconds)) + logger.info( + s"Complete data request for $organizationId/$datasetDirectoryName/$dataLayerName took ${formatDuration(duration)}." + + request.body.headOption + .map(firstReq => s" First of ${request.body.size} requests was $firstReq") + .getOrElse("")) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + } + + /** + * Handles requests for raw binary data via HTTP GET. + */ + def requestRawCuboid( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + // Mag1 coordinates of the top-left corner of the bounding box + x: Int, + y: Int, + z: Int, + // Target-mag size of the bounding box + width: Int, + height: Int, + depth: Int, + // Mag in three-component format (e.g. 
1-1-1 or 16-16-8) + mag: String, + // If true, use lossy compression by sending only half-bytes of the data + halfByte: Boolean, + mappingName: Option[String] + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" + dataRequest = DataRequest( + VoxelPosition(x, y, z, magParsed), + width, + height, + depth, + DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) + ) + (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + + def requestRawCuboidPost( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String + ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + + /** + * Handles a request for raw binary data via a HTTP GET. Used by knossos. + */ + def requestViaKnossos(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: Int, + x: Int, + y: Int, + z: Int, + cubeSize: Int): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + dataRequest = DataRequest( + VoxelPosition(x * cubeSize * mag, y * cubeSize * mag, z * cubeSize * mag, Vec3Int(mag, mag, mag)), + cubeSize, + cubeSize, + cubeSize + ) + (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + + def thumbnailJpeg(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + x: Int, + y: Int, + z: Int, + width: Int, + height: Int, + mag: String, + mappingName: Option[String], + intensityMin: Option[Double], + intensityMax: Option[Double], + color: Option[String], + invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" + dataRequest = DataRequest( + VoxelPosition(x, y, z, magParsed), + width, + height, + depth = 1, + DataServiceRequestSettings(appliedAgglomerate = mappingName) + ) + (data, _) <- requestData(dataSource.id, dataLayer, dataRequest) + 
intensityRange: Option[(Double, Double)] = intensityMin.flatMap(min => intensityMax.map(max => (min, max))) + layerColor = color.flatMap(Color.fromHTML) + params = ImageCreatorParameters( + dataLayer.elementClass, + useHalfBytes = false, + slideWidth = width, + slideHeight = height, + imagesPerRow = 1, + blackAndWhite = false, + intensityRange = intensityRange, + isSegmentation = dataLayer.category == Category.segmentation, + color = layerColor, + invertColor = invertColor + ) + dataWithFallback = if (data.length == 0) + new Array[Byte](width * height * dataLayer.bytesPerElement) + else data + spriteSheet <- ImageCreator.spriteSheetFor(dataWithFallback, params).toFox ?~> "image.create.failed" + firstSheet <- spriteSheet.pages.headOption.toFox ?~> "image.page.failed" + outputStream = new ByteArrayOutputStream() + _ = new JPEGWriter().writeToOutputStream(firstSheet.image)(outputStream) + } yield Ok(outputStream.toByteArray).as(jpegMimeType) + } + } + + def mappingJson( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mappingName: String + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound") + mappingRequest = DataServiceMappingRequest(Some(dataSource.id), segmentationLayer, mappingName) + result <- mappingService.handleMappingRequest(mappingRequest) + } yield Ok(result) + } + } + + /** + * Handles ad-hoc mesh requests. + */ + def requestAdHocMesh(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String): Action[WebknossosAdHocMeshRequest] = + Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" + adHocMeshRequest = AdHocMeshRequest( + Some(dataSource.id), + segmentationLayer, + request.body.cuboid(dataLayer), + request.body.segmentId, + request.body.voxelSizeFactorInUnit, + tokenContextForRequest(request), + request.body.mapping, + request.body.mappingType, + request.body.additionalCoordinates, + request.body.findNeighbors, + ) + // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three + // consecutive 3D points (i.e., nine floats) form a triangle. + // There are no shared vertices between triangles. 
+ (vertices, neighbors) <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } yield { + // We need four bytes for each float + val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) + responseBuffer.asFloatBuffer().put(vertices) + Ok(responseBuffer.array()).withHeaders(getNeighborIndices(neighbors): _*) + } + } + } + + private def getNeighborIndices(neighbors: List[Int]) = + List("NEIGHBORS" -> formatNeighborList(neighbors), "Access-Control-Expose-Headers" -> "NEIGHBORS") + + private def formatNeighborList(neighbors: List[Int]): String = + "[" + neighbors.mkString(", ") + "]" + + def findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) + } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) + } + } + + def histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) + listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", + dataLayerName) + } yield Ok(Json.toJson(listOfHistograms)) + } + } + + private def requestData( + dataSourceId: DataSourceId, + dataLayer: DataLayer, + dataRequests: DataRequestCollection + )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { + val requests = + dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings)) + binaryDataService.handleDataRequests(requests) + } + + // ZARR ROUTES /** * Serve .zattrs file for a dataset diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala deleted file mode 100644 index 209d000e79e..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala +++ /dev/null @@ -1,199 +0,0 @@ -package com.scalableminds.webknossos.datastore.controllers - -import com.google.inject.Inject -import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.util.image.{Color, JPEGWriter} -import com.scalableminds.util.objectid.ObjectId -import com.scalableminds.util.time.Instant -import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders -import com.scalableminds.webknossos.datastore.image.{ImageCreator, ImageCreatorParameters} -import com.scalableminds.webknossos.datastore.models.DataRequestCollection._ -import com.scalableminds.webknossos.datastore.models.{ - 
DataRequest, - RawCuboidRequest, - VoxelPosition, - WebknossosDataRequest -} -import com.scalableminds.webknossos.datastore.models.requests.{DataServiceDataRequest, DataServiceRequestSettings} -import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, DataSourceId} -import com.scalableminds.webknossos.datastore.services.{ - BinaryDataService, - BinaryDataServiceHolder, - DataStoreAccessTokenService, - DatasetCache, - FindDataService, - UserAccessRequest -} -import com.scalableminds.webknossos.datastore.slacknotification.DSSlackNotificationService -import play.api.i18n.Messages -import play.api.libs.json.Json -import play.api.mvc.{Action, AnyContent, PlayBodyParsers, RawBuffer} - -import java.io.ByteArrayOutputStream -import scala.concurrent.ExecutionContext -import scala.concurrent.duration.DurationInt - -/** - * This is equivalent to the BinaryDataController for Datasets by DatasetId - */ -class WKDatasetController @Inject()( - accessTokenService: DataStoreAccessTokenService, - binaryDataServiceHolder: BinaryDataServiceHolder, - findDataService: FindDataService, - slackNotificationService: DSSlackNotificationService, - datasetCache: DatasetCache -)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) - extends Controller - with MissingBucketHeaders { - - val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService - - def requestViaWebknossos(datasetId: String, dataLayerName: String): Action[List[WebknossosDataRequest]] = - Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - logTime(slackNotificationService.noticeSlowRequest) { - val t = Instant.now - for { - datasetId <- ObjectId.fromString(datasetId) - dataSource <- datasetCache.getById(datasetId) - dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ?~> "Data layer not found" ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - duration = Instant.since(t) - _ = if (duration > (10 seconds)) - logger.info( - s"Complete data request for $datasetId/$dataLayerName took ${formatDuration(duration)}." 
- + request.body.headOption - .map(firstReq => s" First of ${request.body.size} requests was $firstReq") - .getOrElse("")) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } - } - } - - def requestRawCuboid(datasetId: String, - dataLayerName: String, - x: Int, - y: Int, - z: Int, - width: Int, - height: Int, - depth: Int, - mag: String, - halfByte: Boolean, - mappingName: Option[String]): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetId <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND - magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" - dataRequest = DataRequest( - VoxelPosition(x, y, z, magParsed), - width, - height, - depth, - DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) - ) - (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } - } - - def requestRawCuboidPost(datasetId: String, dataLayerName: String): Action[RawCuboidRequest] = - Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetId <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } - } - - def thumbnailJpeg(datasetId: String, - dataLayerName: String, - x: Int, - y: Int, - z: Int, - width: Int, - height: Int, - mag: String, - mappingName: Option[String], - intensityMin: Option[Double], - intensityMax: Option[Double], - color: Option[String], - invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" - dataRequest = DataRequest( - VoxelPosition(x, y, z, magParsed), - width, - height, - depth = 1, - DataServiceRequestSettings(appliedAgglomerate = mappingName) - ) - (data, _) <- requestData(dataSource.id, dataLayer, dataRequest) - intensityRange: Option[(Double, Double)] = intensityMin.flatMap(min => intensityMax.map(max => (min, max))) - layerColor = color.flatMap(Color.fromHTML) - params = ImageCreatorParameters( - dataLayer.elementClass, - useHalfBytes = false, - slideWidth = width, - slideHeight = height, - imagesPerRow = 1, - blackAndWhite = false, - intensityRange = intensityRange, - isSegmentation = dataLayer.category == Category.segmentation, - color = layerColor, - invertColor = invertColor - ) - dataWithFallback = if (data.length == 0) - new Array[Byte](width * height * dataLayer.bytesPerElement) - else data - spriteSheet <- ImageCreator.spriteSheetFor(dataWithFallback, params).toFox ?~> "image.create.failed" - firstSheet <- spriteSheet.pages.headOption.toFox ?~> "image.page.failed" - outputStream = new ByteArrayOutputStream() - _ = new JPEGWriter().writeToOutputStream(firstSheet.image)(outputStream) - } yield 
Ok(outputStream.toByteArray).as(jpegMimeType) - } - } - - def findData(datasetId: String, dataLayerName: String): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND - positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) - } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) - } - } - - def histogram(datasetId: String, dataLayerName: String): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) - listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", - dataLayerName) - } yield Ok(Json.toJson(listOfHistograms)) - } - } - - private def requestData( - dataSourceId: DataSourceId, - dataLayer: DataLayer, - dataRequests: DataRequestCollection - )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { - val requests = - dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings)) - binaryDataService.handleDataRequests(requests) - } -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 7b2d6024c26..71223baf05c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -41,13 +41,12 @@ class ZarrStreamingController @Inject()( * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) */ def requestZAttrs( - datasetId: String, + datasetId: ObjectId, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND header = zarrStreamingService.getHeader(dataSource, dataLayer) } yield Ok(Json.toJson(header)) @@ -55,13 +54,12 @@ class ZarrStreamingController @Inject()( } def requestZarrJson( - datasetId: String, + datasetId: ObjectId, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ~> NOT_FOUND header = zarrStreamingService.getGroupHeader(dataSource, dataLayer) } yield 
Ok(Json.toJson(header)) @@ -118,13 +116,12 @@ class ZarrStreamingController @Inject()( * Note that the result here is not necessarily equal to the file used in the underlying storage. */ def requestDataSource( - datasetId: String, + datasetId: ObjectId, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - dataSource <- datasetCache.getById(datasetIdValidated) ~> NOT_FOUND + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND zarrSource = zarrStreamingService.getZarrDataSource(dataSource, zarrVersion) } yield Ok(Json.toJson(zarrSource)) } @@ -204,15 +201,14 @@ class ZarrStreamingController @Inject()( } def requestRawZarrCube( - datasetId: String, + datasetId: ObjectId, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND result <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) } yield Ok(result) } @@ -241,28 +237,26 @@ class ZarrStreamingController @Inject()( } def requestZArray( - datasetId: String, + datasetId: ObjectId, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (_, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND + (_, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND zarrHeader <- zarrStreamingService.getZArray(dataLayer, mag) } yield Ok(Json.toJson(zarrHeader)) } } def requestZarrJsonForMag( - datasetId: String, + datasetId: ObjectId, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) } yield Ok(Json.toJson(zarrJson)) } @@ -321,14 +315,13 @@ class ZarrStreamingController @Inject()( } yield result def requestDataLayerDirectoryContents( - datasetId: String, + datasetId: ObjectId, dataLayerName: String, zarrVersion: Int ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND contents <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) } yield Ok( @@ -370,15 +363,14 @@ class ZarrStreamingController @Inject()( } def 
requestDataLayerMagDirectoryContents( - datasetId: String, + datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) } yield Ok( @@ -422,19 +414,18 @@ class ZarrStreamingController @Inject()( } def requestDataSourceDirectoryContents( - datasetId: String, + datasetId: ObjectId, zarrVersion: Int ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - datasetIdValidated <- ObjectId.fromString(datasetId) - dataSource <- datasetCache.getById(datasetIdValidated) + dataSource <- datasetCache.getById(datasetId) files <- zarrStreamingService.dataSourceDirectoryContents(dataSource, zarrVersion) } yield Ok( views.html.datastoreZarrDatasourceDir( "Datastore", - datasetId, + datasetId.toString, List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ files )) } @@ -453,7 +444,7 @@ class ZarrStreamingController @Inject()( )) } - def requestZGroup(datasetId: String, dataLayerName: String = ""): Action[AnyContent] = + def requestZGroup(datasetId: ObjectId, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContextForSyncBlock(UserAccessRequest.readDataset(datasetId)) { Ok(zarrStreamingService.zGroupJson) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 848baba4bcf..1fd5fc91709 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -43,6 +43,9 @@ object UserAccessRequest { def readDataset(datasetId: String): UserAccessRequest = UserAccessRequest(DataSourceId(datasetId, ""), AccessResourceType.dataset, AccessMode.read) + def readDataset(datasetId: ObjectId): UserAccessRequest = + UserAccessRequest(DataSourceId(datasetId.toString, ""), AccessResourceType.dataset, AccessMode.read) + def writeDataSource(dataSourceId: DataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.write) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala index 6e22f215db8..c9529c6471f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala @@ -26,6 +26,6 @@ class DatasetCache @Inject()(remoteWebknossosClient: DSRemoteWebknossosClient)(i dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ?~> "Data layer not found" } yield (dataSource, dataLayer) - def invalidateCache(datasetId: String): Unit = cache.remove(ObjectId(datasetId)) + def 
invalidateCache(datasetId: ObjectId): Unit = cache.remove(datasetId)
 }
diff --git a/webknossos-datastore/conf/datastore.latest.routes b/webknossos-datastore/conf/datastore.latest.routes
index a19c02debe1..108baa5b256 100644
--- a/webknossos-datastore/conf/datastore.latest.routes
+++ b/webknossos-datastore/conf/datastore.latest.routes
@@ -4,36 +4,29 @@
 GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health

 # Read image data
-POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
-POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
-GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String])
-GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean])
-GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
-GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
-
-POST /wkDatasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.requestViaWebknossos(datasetId: String, dataLayerName: String)
-POST /wkDatasets/:datasetId/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.requestRawCuboidPost(datasetId: String, dataLayerName: String)
-GET /wkDatasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.requestRawCuboid(datasetId: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String])
-GET /wkDatasets/:datasetId/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.thumbnailJpeg(datasetId: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean])
-GET /wkDatasets/:datasetId/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.findData(datasetId: String, dataLayerName: String)
-GET /wkDatasets/:datasetId/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.histogram(datasetId: String, dataLayerName: String)
+POST /datasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(datasetId: ObjectId, dataLayerName: String)
+POST /datasets/:datasetId/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(datasetId: ObjectId, dataLayerName: String)
+GET /datasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(datasetId: ObjectId, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String])
+GET /datasets/:datasetId/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(datasetId: ObjectId, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean])
+GET /datasets/:datasetId/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(datasetId: ObjectId, dataLayerName: String)
+GET /datasets/:datasetId/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(datasetId: ObjectId, dataLayerName: String)

 # Knossos compatible routes
-GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int)
+GET /datasets/:datasetId/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(datasetId: ObjectId, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int)

 # Zarr2 compatible routes
-GET /zarr/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: String, dataLayerName="")
-GET /zarr/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(datasetId: String, dataLayerName: String)
-GET /zarr/:datasetId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: String, dataLayerName: String)
-GET /zarr/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
-GET /zarr/:datasetId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(datasetId: String, dataLayerName: String, mag: String)
-GET /zarr/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: String, dataLayerName: String, mag: String, coordinates: String)
+GET /zarr/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 2)
+GET /zarr/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 2)
+GET /zarr/:datasetId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: ObjectId, dataLayerName="")
+GET /zarr/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: ObjectId, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(datasetId: ObjectId, dataLayerName: String)
+GET /zarr/:datasetId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: ObjectId, dataLayerName: String)
+GET /zarr/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /zarr/:datasetId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(datasetId: ObjectId, dataLayerName: String, mag: String)
+GET /zarr/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: ObjectId, dataLayerName: String, mag: String,
coordinates: String) GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) @@ -49,16 +42,16 @@ GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Zarr3 compatible routes -GET /zarr3_experimental/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(datasetId: String, dataLayerName: String) -GET /zarr3_experimental/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(datasetId: String, dataLayerName: String, mag: String) -GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr3_experimental/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: ObjectId, zarrVersion: Int = 3) +GET 
/zarr3_experimental/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(datasetId: ObjectId, dataLayerName: String) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(datasetId: ObjectId, dataLayerName: String, mag: String) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: ObjectId, dataLayerName: String, mag: String, coordinates: String) GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) @@ -72,7 +65,7 @@ GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:m GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Segmentation mappings -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) +GET /datasets/:datasetId/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(datasetId: ObjectId, dataLayerName: String, mappingName: String) GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Agglomerate files @@ -97,7 +90,7 @@ POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerN POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # 
Ad-Hoc Meshing -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(datasetId: ObjectId, dataLayerName: String) # Segment-Index files GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String) @@ -122,7 +115,7 @@ DELETE /datasets/:organizationId/:datasetDirectoryName/deleteOnDisk POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose() POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset() -DELETE /wkDatasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.invalidateCache(datasetId: String) +DELETE /wkDatasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.invalidateCache(datasetId: ObjectId) # Actions POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking() diff --git a/webknossos-datastore/conf/datastore.versioned.routes b/webknossos-datastore/conf/datastore.versioned.routes index bc0e5987208..62666735fef 100644 --- a/webknossos-datastore/conf/datastore.versioned.routes +++ b/webknossos-datastore/conf/datastore.versioned.routes @@ -3,6 +3,20 @@ -> /v10/ datastore.latest.Routes -> /v9/ datastore.latest.Routes +# Read image data +POST /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaWebknossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidPost(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboid(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.LegacyController.thumbnailJpeg(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.LegacyController.findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram 
@com.scalableminds.webknossos.datastore.controllers.LegacyController.histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String) + +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) + +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMesh(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) + + # Zarr2 compatible routes GET /v9/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) GET /v9/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) From d51dea9dc22020c72035e436c06c3d12e3000b56 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 7 Jul 2025 13:50:11 +0200 Subject: [PATCH 09/10] Agglomerate files by dataset id --- .../controllers/DataSourceController.scala | 64 +++++++------------ .../conf/datastore.latest.routes | 14 ++-- .../conf/datastore.versioned.routes | 4 +- 3 files changed, 31 insertions(+), 51 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index e3551d6fa4a..1d9c894bbed 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -244,48 +244,40 @@ class DataSourceController @Inject()( } def listMappings( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - addNoCacheHeaderFallback( - Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetDirectoryName, dataLayerName)))) + accessTokenService.validateAccessFromTokenContextForSyncBlock(UserAccessRequest.readDataset(datasetId)) { + addNoCacheHeaderFallback( // TODO + Ok(Json.toJson(dataSourceService.exploreMappings(???, ???, dataLayerName)))) } } def listAgglomerates( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + UserAccessRequest.readDataset(datasetId)) { for { agglomerateService <- 
binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateList = agglomerateService.listAgglomeratesFiles(dataSource.id, dataLayer) } yield Ok(Json.toJson(agglomerateList)) } } def generateAgglomerateSkeleton( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + UserAccessRequest.readDataset(datasetId)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) skeleton <- agglomerateService .generateSkeleton(agglomerateFileKey, agglomerateId) ?~> "agglomerateSkeleton.failed" @@ -294,19 +286,16 @@ class DataSourceController @Inject()( } def agglomerateGraph( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + UserAccessRequest.readDataset(datasetId)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) agglomerateGraph <- agglomerateService .generateAgglomerateGraph(agglomerateFileKey, agglomerateId) ?~> "agglomerateGraph.failed" @@ -315,19 +304,16 @@ class DataSourceController @Inject()( } def positionForSegmentViaAgglomerateFile( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + UserAccessRequest.readDataset(datasetId)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) position <- agglomerateService .positionForSegmentId(agglomerateFileKey, segmentId) ?~> "getSegmentPositionFromAgglomerateFile.failed" @@ -336,18 +322,15 @@ class DataSourceController @Inject()( } 
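// The agglomerate endpoints in this controller now all share the same prologue:
// validate read access for the dataset id, resolve (dataSource, dataLayer) through
// the DatasetCache, then look up the agglomerate file. A rough sketch of that shared
// shape, assuming the members already used above (datasetCache, binaryDataServiceHolder)
// and Fox's toFox/~> combinators; the helper name `withAgglomerateFileKey` is
// illustrative only and not part of this patch:

  private def withAgglomerateFileKey[T](datasetId: ObjectId, dataLayerName: String, mappingName: String)(
      use: (AgglomerateService, AgglomerateFileKey) => Fox[T])(implicit tc: TokenContext): Fox[T] =
    for {
      agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox
      (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND
      agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName)
      result <- use(agglomerateService, agglomerateFileKey)
    } yield result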
def largestAgglomerateId( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + UserAccessRequest.readDataset(datasetId)){ for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) largestAgglomerateId: Long <- agglomerateService.largestAgglomerateId(agglomerateFileKey) } yield Ok(Json.toJson(largestAgglomerateId)) @@ -355,18 +338,15 @@ class DataSourceController @Inject()( } def agglomerateIdsForSegmentIds( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + UserAccessRequest.readDataset(datasetId)){ for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) agglomerateIds: Seq[Long] <- agglomerateService.agglomerateIdsForSegmentIds( agglomerateFileKey, diff --git a/webknossos-datastore/conf/datastore.latest.routes b/webknossos-datastore/conf/datastore.latest.routes index 108baa5b256..0490c0c3fa0 100644 --- a/webknossos-datastore/conf/datastore.latest.routes +++ b/webknossos-datastore/conf/datastore.latest.routes @@ -66,15 +66,15 @@ GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:m # Segmentation mappings GET /datasets/:datasetId/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(datasetId: ObjectId, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(datasetId: ObjectId, dataLayerName: String) # Agglomerate files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(organizationId: String, 
datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, segmentId: Long) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(datasetId: ObjectId, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(datasetId: ObjectId, dataLayerName: String, mappingName: String) +POST /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(datasetId: ObjectId, dataLayerName: String, mappingName: String) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(datasetId: ObjectId, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String) diff --git a/webknossos-datastore/conf/datastore.versioned.routes b/webknossos-datastore/conf/datastore.versioned.routes index 62666735fef..67d4c60dcec 100644 --- a/webknossos-datastore/conf/datastore.versioned.routes +++ b/webknossos-datastore/conf/datastore.versioned.routes @@ -13,8 +13,8 @@ GET 
/v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLay GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMesh(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) +POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMesh(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) # Zarr2 compatible routes From 611e552f19ecfbd2ad27379721422b739c890c45 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 7 Jul 2025 17:32:30 +0200 Subject: [PATCH 10/10] Update more routes to use dataset id --- app/controllers/AnnotationIOController.scala | 8 +- app/models/annotation/AnnotationService.scala | 5 +- .../dataset/WKRemoteDataStoreClient.scala | 18 ++- frontend/javascripts/admin/api/mesh.ts | 8 +- frontend/javascripts/admin/rest_api.ts | 19 +-- .../dataset/dataset_settings_view.tsx | 2 +- .../bucket_data_handling/wkstore_adapter.ts | 9 +- .../model/sagas/load_histogram_data_saga.ts | 2 +- .../sagas/meshes/precomputed_mesh_saga.ts | 4 +- .../segments_tab/segments_view_helper.tsx | 3 +- .../controllers/DSMeshController.scala | 46 ++---- .../controllers/DataSourceController.scala | 131 ++++++------------ .../controllers/LegacyController.scala | 24 +++- .../services/AccessTokenService.scala | 4 +- .../services/mesh/DSFullMeshService.scala | 42 ++---- .../conf/datastore.latest.routes | 28 ++-- .../TSRemoteDatastoreClient.scala | 2 +- 17 files changed, 147 insertions(+), 208 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 930d95950f2..a63045c5e17 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -337,8 +337,7 @@ class AnnotationIOController @Inject()( else volumeTracing.boundingBox for { - tracingCanHaveSegmentIndex <- canHaveSegmentIndex(organizationId, - dataset.name, + tracingCanHaveSegmentIndex <- canHaveSegmentIndex(dataset._id.toString, fallbackLayerOpt.map(_.name), remoteDataStoreClient) elementClassProto <- fallbackLayerOpt @@ -358,13 +357,12 @@ class AnnotationIOController @Inject()( } private def canHaveSegmentIndex( - organizationId: String, - datasetName: String, + datasetId: String, fallbackLayerName: Option[String], remoteDataStoreClient: WKRemoteDataStoreClient)(implicit ec: ExecutionContext): Fox[Boolean] = fallbackLayerName match { case Some(layerName) => - remoteDataStoreClient.hasSegmentIndexFile(organizationId, 
datasetName, layerName) + remoteDataStoreClient.hasSegmentIndexFile(datasetId, layerName) case None => Fox.successful(true) } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 54577752334..d4ff699186b 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -121,6 +121,7 @@ class AnnotationService @Inject()( private def createVolumeTracing( dataSource: DataSource, datasetOrganizationId: String, + datasetId: ObjectId, datasetDataStore: DataStore, fallbackLayer: Option[SegmentationLayer], boundingBox: Option[BoundingBox] = None, @@ -138,7 +139,7 @@ class AnnotationService @Inject()( remoteDatastoreClient = new WKRemoteDataStoreClient(datasetDataStore, rpc) fallbackLayerHasSegmentIndex <- fallbackLayer match { case Some(layer) => - remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationId, dataSource.id.directoryName, layer.name) + remoteDatastoreClient.hasSegmentIndexFile(datasetId.toString, layer.name) case None => Fox.successful(false) } elementClassProto <- ElementClass @@ -237,6 +238,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, dataset._organization, + dataset._id, dataStore, fallbackLayer, magRestrictions = params.magRestrictions.getOrElse(MagRestrictions.empty), @@ -429,6 +431,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, dataset._organization, + datasetId, dataStore, fallbackLayer = fallbackLayer, boundingBox = boundingBox.flatMap { box => diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index abecc28931e..37c30cbea31 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -21,7 +21,7 @@ import scala.concurrent.duration.DurationInt class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLogging { - private lazy val hasSegmentIndexFileCache: AlfuCache[(String, String, String), Boolean] = + private lazy val hasSegmentIndexFileCache: AlfuCache[(String, String), Boolean] = AlfuCache(timeToLive = 1 minute) def getDataLayerThumbnail(dataset: Dataset, @@ -34,8 +34,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin val targetMagBoundingBox = mag1BoundingBox / mag logger.debug( s"Thumbnail called for: ${dataset._id}, organization: ${dataset._organization}, directoryName: ${dataset.directoryName}, Layer: $dataLayerName") - rpc( - s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.directoryName)}/layers/$dataLayerName/thumbnail.jpg") + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$dataLayerName/thumbnail.jpg") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("mag" -> mag.toMagLiteral()) .addQueryString("x" -> mag1BoundingBox.topLeft.x.toString) @@ -58,8 +57,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin additionalCoordinates: Option[Seq[AdditionalCoordinate]]): Fox[Array[Byte]] = { val targetMagBoundingBox = mag1BoundingBox / mag logger.debug(s"Fetching raw data. 
Mag $mag, mag1 bbox: $mag1BoundingBox, target-mag bbox: $targetMagBoundingBox") - rpc( - s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.directoryName)}/layers/$layerName/readData") + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$layerName/readData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .postJsonWithBytesResponse( RawCuboidRequest(mag1BoundingBox.topLeft, targetMagBoundingBox.size, mag, additionalCoordinates)) @@ -67,7 +65,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin def findPositionWithData(dataset: Dataset, dataLayerName: String): Fox[JsObject] = rpc( - s"${dataStore.url}/data/datasets/${dataset._organization}/${dataset.directoryName}/layers/$dataLayerName/findData") + s"${dataStore.url}/data/datasets/${dataset._id}/layers/$dataLayerName/findData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .getWithJsonResponse[JsObject] @@ -80,13 +78,13 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .silent .getWithJsonResponse[List[DirectoryStorageReport]] - def hasSegmentIndexFile(organizationId: String, datasetName: String, layerName: String)( + def hasSegmentIndexFile(datasetId: String, layerName: String)( implicit ec: ExecutionContext): Fox[Boolean] = { - val cacheKey = (organizationId, datasetName, layerName) + val cacheKey = (datasetId, layerName) hasSegmentIndexFileCache.getOrLoad( cacheKey, k => - rpc(s"${dataStore.url}/data/datasets/${k._1}/${k._2}/layers/${k._3}/hasSegmentIndex") + rpc(s"${dataStore.url}/data/datasets/${k._1}/layers/${k._2}/hasSegmentIndex") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .silent .getWithJsonResponse[Boolean] @@ -103,7 +101,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin def updateDatasetInDSCache(datasetId: String): Fox[Unit] = for { - _ <- rpc(s"${dataStore.url}/data/wkDatasets/$datasetId") + _ <- rpc(s"${dataStore.url}/data/datasets/$datasetId") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .delete() } yield () diff --git a/frontend/javascripts/admin/api/mesh.ts b/frontend/javascripts/admin/api/mesh.ts index d7604fb001d..373c76e6796 100644 --- a/frontend/javascripts/admin/api/mesh.ts +++ b/frontend/javascripts/admin/api/mesh.ts @@ -28,7 +28,7 @@ type ListMeshChunksRequest = { export function getMeshfileChunksForSegment( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + datasetId: string, layerName: string, meshFile: APIMeshFileInfo, segmentId: number, @@ -56,7 +56,7 @@ export function getMeshfileChunksForSegment( segmentId, }; return Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/meshes/chunks?${params}`, + `${dataStoreUrl}/data/datasets/${datasetId}/layers/${layerName}/meshes/chunks?${params}`, { data: payload, showErrorToast: false, @@ -78,13 +78,13 @@ type MeshChunkDataRequestList = { export function getMeshfileChunkData( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + datasetId: string, layerName: string, batchDescription: MeshChunkDataRequestList, ): Promise { return doWithToken(async (token) => { const dracoDataChunks = await Request.sendJSONReceiveArraybuffer( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/meshes/chunks/data?token=${token}`, + 
`${dataStoreUrl}/data/datasets/${datasetId}/layers/${layerName}/meshes/chunks/data?token=${token}`, { data: batchDescription, useWebworkerForArrayBuffer: true, diff --git a/frontend/javascripts/admin/rest_api.ts b/frontend/javascripts/admin/rest_api.ts index 5c781c6ad4c..f8c1e3b1a9c 100644 --- a/frontend/javascripts/admin/rest_api.ts +++ b/frontend/javascripts/admin/rest_api.ts @@ -840,13 +840,12 @@ export async function getAnnotationProto( export function hasSegmentIndexInDataStore( dataStoreUrl: string, - datasetDirectoryName: string, + datasetId: string, dataLayerName: string, - organizationId: string, ) { return doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${organizationId}/${datasetDirectoryName}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasetId}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, ), ); } @@ -977,7 +976,7 @@ export async function getDatasets( export function readDatasetDatasource(dataset: APIDataset): Promise { return doWithToken((token) => Request.receiveJSON( - `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.directoryName}/readInboxDataSource?token=${token}`, + `${dataset.dataStore.url}/data/datasets/${dataset.id}/readInboxDataSource?token=${token}`, ), ); } @@ -986,10 +985,11 @@ export async function updateDatasetDatasource( datasetDirectoryName: string, dataStoreUrl: string, datasource: APIDataSource, + datasetId: string ): Promise { await doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasource.id.team}/${datasetDirectoryName}?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasetId}?token=${token}`, { data: datasource, method: "PUT", @@ -1330,6 +1330,7 @@ export async function triggerDatasetCheck(datastoreHost: string): Promise export async function triggerDatasetClearCache( datastoreHost: string, dataSourceId: APIDataSourceId, + datasetId: string, layerName?: string, ): Promise { await doWithToken((token) => { @@ -1339,7 +1340,7 @@ export async function triggerDatasetClearCache( params.set("layerName", layerName); } return Request.triggerRequest( - `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?${params}`, + `/data/triggers/reload/${dataSourceId.owningOrganization}/${datasetId}?${params}`, { host: datastoreHost, method: "POST", @@ -1371,7 +1372,7 @@ export async function triggerDatasetClearThumbnailCache(datasetId: string): Prom export async function clearCache(dataset: APIMaybeUnimportedDataset, layerName?: string) { return Promise.all([ - triggerDatasetClearCache(dataset.dataStore.url, dataset, layerName), + triggerDatasetClearCache(dataset.dataStore.url, dataset, dataset.id, layerName), triggerDatasetClearThumbnailCache(dataset.id), ]); } @@ -1430,12 +1431,12 @@ export async function findDataPositionForVolumeTracing( export async function getHistogramForLayer( datastoreUrl: string, - dataSourceId: APIDataSourceId, + datasetId: string, layerName: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/histogram?token=${token}`, + `${datastoreUrl}/data/datasets/${datasetId}/layers/${layerName}/histogram?token=${token}`, { showErrorToast: false }, ), ); diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index 9099c1ae5f9..311c52a3836 
100644
--- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx
+++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx
@@ -419,7 +419,7 @@ class DatasetSettingsView extends React.PureComponent
diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts
--- a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts
+++ b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts
   const state = Store.getState();
   const datasetId = state.dataset.id;
-  const datasetDirectoryName = state.dataset.directoryName;
-  const organization = state.dataset.owningOrganization;
   const dataStoreHost = state.dataset.dataStore.url;
   const tracingStoreHost = state.annotation.tracingStore.url;
-  // Prefer datasetId (id) if available, otherwise fall back to old method
-  const getDataStoreUrl = (optLayerName?: string) =>
-    datasetId
-      ? `${dataStoreHost}/data/wkDatasets/${datasetId}/layers/${optLayerName || layerInfo.name}`
-      : `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${optLayerName || layerInfo.name}`;
+  const getDataStoreUrl = (optLayerName?: string) =>
+    `${dataStoreHost}/data/datasets/${datasetId}/layers/${optLayerName || layerInfo.name}`;
   const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`;

   const maybeVolumeTracing =
diff --git a/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts b/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts
index 1fdce225278..9d3bc29c951 100644
--- a/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts
+++ b/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts
@@ -37,7 +37,7 @@ function* loadHistogramForLayer(layerName: string): Saga<void> {
   let histogram;
   try {
-    histogram = yield* call(getHistogramForLayer, dataset.dataStore.url, dataset, layerName);
+    histogram = yield* call(getHistogramForLayer, dataset.dataStore.url, dataset.id, layerName);

     if (!Array.isArray(histogram) || histogram.length === 0) {
       yield* put(setHistogramDataForLayerAction(layerName, null));
diff --git a/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts b/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts
index d9d709d5fdf..3cfeccd47f9 100644
--- a/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts
+++ b/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts
@@ -284,7 +284,7 @@ function* _getChunkLoadingDescriptors(
   const segmentInfo = yield* call(
     meshApi.getMeshfileChunksForSegment,
     dataset.dataStore.url,
-    dataset,
+    dataset.id,
     getBaseSegmentationName(segmentationLayer),
     meshFile,
     segmentId,
@@ -364,7 +364,7 @@ function* loadPrecomputedMeshesInChunksForLod(
   const dataForChunks = yield* call(
     meshApi.getMeshfileChunkData,
     dataset.dataStore.url,
-    dataset,
+    dataset.id,
     getBaseSegmentationName(segmentationLayer),
     {
       meshFileName: meshFile.name,
diff --git a/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx
index 8656b7f2b82..6860691aa9b 100644
--- a/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx
+++ b/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx
@@ -76,9 +76,8 @@ export async function hasSegmentIndex(
   if (maybeVolumeTracing == null) {
     segmentIndexInDataStore = await hasSegmentIndexInDataStore(
       dataset.dataStore.url,
-      dataset.directoryName,
+      dataset.id,
       visibleSegmentationLayer.name,
-      dataset.owningOrganization,
     );
   }
   return (
diff --git
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 272c7cabacb..3b3d8862c35 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.mesh.{ @@ -20,7 +21,7 @@ class DSMeshController @Inject()( accessTokenService: DataStoreAccessTokenService, meshFileService: MeshFileService, fullMeshService: DSFullMeshService, - dataSourceRepository: DataSourceRepository, + datasetCache: DatasetCache, val dsRemoteWebknossosClient: DSRemoteWebknossosClient, val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, val binaryDataServiceHolder: BinaryDataServiceHolder @@ -30,21 +31,17 @@ class DSMeshController @Inject()( override def allowRemoteOrigin: Boolean = true - def listMeshFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + def listMeshFiles(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileInfos <- meshFileService.listMeshFiles(dataSource.id, dataLayer) } yield Ok(Json.toJson(meshFileInfos)) } } - def listMeshChunksForSegment(organizationId: String, - datasetDirectoryName: String, + def listMeshChunksForSegment(datasetId: ObjectId, dataLayerName: String, /* If targetMappingName is set, assume that meshFile contains meshes for the oversegmentation. 
Collect mesh chunks of all *unmapped* segment ids @@ -55,12 +52,9 @@ class DSMeshController @Inject()( targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- meshFileService.lookUpMeshFileKey(dataSource.id, dataLayer, request.body.meshFileName) mappingNameForMeshFile <- meshFileService.mappingNameForMeshFile(meshFileKey) segmentIds: Seq[Long] <- segmentIdsForAgglomerateIdIfNeeded( @@ -77,16 +71,11 @@ class DSMeshController @Inject()( } } - def readMeshChunk(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[MeshChunkDataRequestList] = + def readMeshChunk(datasetId: ObjectId, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- meshFileService.lookUpMeshFileKey(dataSource.id, dataLayer, request.body.meshFileName) (data, encoding) <- meshFileService.readMeshChunk(meshFileKey, request.body.requests) ?~> "mesh.file.loadChunk.failed" } yield { @@ -97,17 +86,12 @@ class DSMeshController @Inject()( } } - def loadFullMeshStl(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[FullMeshRequest] = + def loadFullMeshStl(datasetId: ObjectId, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - data: Array[Byte] <- fullMeshService.loadFor(organizationId, - datasetDirectoryName, - dataLayerName, - request.body) ?~> "mesh.file.loadChunk.failed" + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + data: Array[Byte] <- fullMeshService.loadFor(dataSource, dataLayer, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 45e90b315b9..53957d37a23 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -17,19 +17,16 @@ import 
com.scalableminds.webknossos.datastore.helpers.{ SegmentIndexData, SegmentStatisticsParameters } -import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.mesh.{MeshFileService, MeshMappingHelper} import com.scalableminds.webknossos.datastore.services.segmentindex.SegmentIndexFileService import com.scalableminds.webknossos.datastore.services.uploading._ import com.scalableminds.webknossos.datastore.storage.DataVaultService -import com.scalableminds.util.tools.Box.tryo import com.scalableminds.util.tools.{Box, Empty, Failure, Full} import com.scalableminds.webknossos.datastore.services.mapping.AgglomerateService import play.api.data.Form import play.api.data.Forms.{longNumber, nonEmptyText, number, tuple} -import play.api.i18n.Messages import play.api.libs.Files import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} @@ -63,20 +60,13 @@ class DataSourceController @Inject()( override def allowRemoteOrigin: Boolean = true - def readInboxDataSource(organizationId: String, datasetDirectoryName: String): Action[AnyContent] = - Action.async { implicit request => - { - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - // Read directly from file, not from repository to ensure recent changes are seen - val dataSource: InboxDataSource = - dataSourceService.dataSourceFromDir( - dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName), - organizationId) - Ok(Json.toJson(dataSource)) - } - } + def readInboxDataSource(datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + } yield Ok(Json.toJson(dataSource)) } + } def triggerInboxCheckBlocking(): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { @@ -260,8 +250,7 @@ class DataSourceController @Inject()( datasetId: ObjectId, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataset(datasetId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateList = agglomerateService.listAgglomeratesFiles(dataSource.id, dataLayer) @@ -275,8 +264,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataset(datasetId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) @@ -292,8 +280,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): 
Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataset(datasetId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) @@ -309,8 +296,7 @@ class DataSourceController @Inject()( mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataset(datasetId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) @@ -325,8 +311,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataset(datasetId)){ + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) @@ -340,8 +325,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataset(datasetId)){ + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) @@ -353,13 +337,12 @@ class DataSourceController @Inject()( } } - def update(organizationId: String, datasetDirectoryName: String): Action[DataSource] = + def update(datasetId: ObjectId): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.writeDataSource(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId)) { for { - dataSource <- dataSourceRepository.get(DataSourceId(datasetDirectoryName, organizationId)).toFox ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + // TODO: This route does not make sense right now. 
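+        // For virtual datasets, which exist only in the database, there is no
+        // datasource-properties.json on disk that updateDataSource could rewrite,
+        // so the write below presumably has to move into the webknossos database
+        // before this route is useful for explored remote datasets.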
_ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true) } yield Ok } @@ -408,36 +391,32 @@ class DataSourceController @Inject()( } } - private def clearCachesOfDataSource(organizationId: String, - datasetDirectoryName: String, - layerName: Option[String]): InboxDataSource = { - val dataSourceId = DataSourceId(datasetDirectoryName, organizationId) + private def clearCachesOfDataSource(datasetId: ObjectId, dataSource: DataSource, layerName: Option[String]): Unit = { + val dataSourceId = dataSource.id + val organizationId = dataSourceId.organizationId + val datasetDirectoryName = dataSourceId.directoryName val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetDirectoryName, layerName) val closedMeshFileHandleCount = meshFileService.clearCache(dataSourceId, layerName) val closedSegmentIndexFileHandleCount = segmentIndexFileService.clearCache(dataSourceId, layerName) - val reloadedDataSource: InboxDataSource = dataSourceService.dataSourceFromDir( - dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName), - organizationId) datasetErrorLoggingService.clearForDataset(organizationId, datasetDirectoryName) - val clearedVaultCacheEntriesOpt = dataSourceService.invalidateVaultCache(reloadedDataSource, layerName) + val clearedVaultCacheEntriesOpt = dataSourceService.invalidateVaultCache(dataSource, layerName) clearedVaultCacheEntriesOpt.foreach { clearedVaultCacheEntries => logger.info( s"Cleared caches for ${layerName.map(l => s"layer '$l' of ").getOrElse("")}dataset $organizationId/$datasetDirectoryName: closed $closedAgglomerateFileHandleCount agglomerate file handles, $closedMeshFileHandleCount mesh file handles, $closedSegmentIndexFileHandleCount segment index file handles, removed $clearedBucketProviderCount bucketProviders, $clearedVaultCacheEntries vault cache entries and $removedChunksCount image chunk cache entries.") } - reloadedDataSource } - def reload(organizationId: String, - datasetDirectoryName: String, - layerName: Option[String] = None): Action[AnyContent] = + def reload(organizationId: String, datasetId: ObjectId, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { - val reloadedDataSource = clearCachesOfDataSource(organizationId, datasetDirectoryName, layerName) for { - _ <- dataSourceRepository.updateDataSource(reloadedDataSource) + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + _ = clearCachesOfDataSource(datasetId, dataSource, layerName) + _ = datasetCache.invalidateCache(datasetId) + reloadedDataSource <- datasetCache.getById(datasetId) } yield Ok(Json.toJson(reloadedDataSource)) } } @@ -446,7 +425,7 @@ class DataSourceController @Inject()( Action.async { implicit request => val dataSourceId = DataSourceId(datasetDirectoryName, organizationId) accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { - tryo(clearCachesOfDataSource(organizationId, datasetDirectoryName, None)) + //tryo(clearCachesOfDataSource(organizationId, datasetDirectoryName, None)) for { _ <- dataSourceService.deleteOnDisk( organizationId, @@ -557,16 +536,11 @@ class DataSourceController @Inject()( } } - def checkSegmentIndexFile(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): 
Action[AnyContent] = + def checkSegmentIndexFile(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKeyBox <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer).shiftBox } yield Ok(Json.toJson(segmentIndexFileKeyBox.isDefined)) } @@ -577,17 +551,13 @@ class DataSourceController @Inject()( * * @return List of bucketPositions as positions (not indices) of 32³ buckets in mag */ - def getSegmentIndex(organizationId: String, - datasetDirectoryName: String, + def getSegmentIndex(datasetId: ObjectId, dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) segmentIds <- segmentIdsForAgglomerateIdIfNeeded( dataSource.id, @@ -617,16 +587,11 @@ class DataSourceController @Inject()( * * @return List of bucketPositions as indices of 32³ buckets (in target mag) */ - def querySegmentIndex(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = + def querySegmentIndex(datasetId: ObjectId, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -650,16 +615,11 @@ class DataSourceController @Inject()( } } - def getSegmentVolume(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(datasetId: ObjectId, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, 
organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) agglomerateFileKeyOpt <- Fox.runOptional(request.body.mappingName)( agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, _)) @@ -677,16 +637,11 @@ class DataSourceController @Inject()( } } - def getSegmentBoundingBox(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(datasetId: ObjectId, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) agglomerateFileKeyOpt <- Fox.runOptional(request.body.mappingName)( agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, _)) @@ -735,7 +690,7 @@ class DataSourceController @Inject()( } def invalidateCache(datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId.toString)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId)) { datasetCache.invalidateCache(datasetId) Future.successful(Ok) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala index 710e200559a..dc3dee5a132 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala @@ -16,7 +16,7 @@ import com.scalableminds.webknossos.datastore.models.{DataRequest, RawCuboidRequ import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, DataSourceId, GenericDataSource, SegmentationLayer} import com.scalableminds.webknossos.datastore.models.requests.{DataServiceDataRequest, DataServiceMappingRequest, DataServiceRequestSettings} import com.scalableminds.webknossos.datastore.services.mapping.MappingService -import com.scalableminds.webknossos.datastore.services.mesh.{AdHocMeshRequest, AdHocMeshService, AdHocMeshServiceHolder} +import com.scalableminds.webknossos.datastore.services.mesh.{AdHocMeshRequest, AdHocMeshService, AdHocMeshServiceHolder, DSFullMeshService, FullMeshRequest} import com.scalableminds.webknossos.datastore.services.{BinaryDataService, BinaryDataServiceHolder, DSRemoteTracingstoreClient, DSRemoteWebknossosClient, DataSourceRepository, DataStoreAccessTokenService, FindDataService, 
UserAccessRequest, ZarrStreamingService} import com.scalableminds.webknossos.datastore.slacknotification.DSSlackNotificationService import play.api.i18n.Messages @@ -39,7 +39,8 @@ class LegacyController @Inject()( slackNotificationService: DSSlackNotificationService, adHocMeshServiceHolder: AdHocMeshServiceHolder, findDataService: FindDataService, - zarrStreamingService: ZarrStreamingService + zarrStreamingService: ZarrStreamingService, + fullMeshService: DSFullMeshService )(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with Zarr3OutputHelper @@ -513,4 +514,23 @@ class LegacyController @Inject()( } } + // MESH ROUTES + + def loadFullMeshStl(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String): Action[FullMeshRequest] = + Action.async(validateJson[FullMeshRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + data: Array[Byte] <- fullMeshService.loadFor(dataSource, dataLayer, + request.body) ?~> "mesh.file.loadChunk.failed" + + } yield Ok(data) + } + } + } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 1fd5fc91709..12deafa5b61 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -49,8 +49,8 @@ object UserAccessRequest { def writeDataSource(dataSourceId: DataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.write) - def writeDataset(datasetId: String): UserAccessRequest = - UserAccessRequest(DataSourceId(datasetId, ""), AccessResourceType.dataset, AccessMode.write) + def writeDataset(datasetId: ObjectId): UserAccessRequest = + UserAccessRequest(DataSourceId(datasetId.toString, ""), AccessResourceType.dataset, AccessMode.write) def readTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala index c577a4b636a..f153d16944e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig -import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, SegmentationLayer} import com.scalableminds.webknossos.datastore.models.requests.Cuboid import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, VoxelPosition} import 
com.scalableminds.webknossos.datastore.services._ @@ -52,29 +52,22 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def loadFor(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, - m: MessagesProvider, - tc: TokenContext): Fox[Array[Byte]] = + def loadFor(dataSource: DataSource, dataLayer: DataLayer, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = if (fullMeshRequest.meshFileName.isDefined) - loadFullMeshFromMeshFile(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) + loadFullMeshFromMeshFile(dataSource, dataLayer, fullMeshRequest) else - loadFullMeshFromAdHoc(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) + loadFullMeshFromAdHoc(dataSource, dataLayer, fullMeshRequest) - private def loadFullMeshFromAdHoc(organizationId: String, - datasetName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, - m: MessagesProvider, - tc: TokenContext): Fox[Array[Byte]] = + private def loadFullMeshFromAdHoc(dataSource: DataSource, dataLayer: DataLayer, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = for { mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" seedPosition <- fullMeshRequest.seedPosition.toFox ?~> "seedPosition.neededForAdHoc" - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, - dataLayerName) segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" before = Instant.now verticesForChunks <- getAllAdHocChunks(dataSource, @@ -119,17 +112,12 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, } yield allVertices } - private def loadFullMeshFromMeshFile(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, - m: MessagesProvider, - tc: TokenContext): Fox[Array[Byte]] = + private def loadFullMeshFromMeshFile(dataSource: DataSource, dataLayer: DataLayer, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = for { before <- Instant.nowFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "mesh.meshFileName.required" meshFileKey <- meshFileService.lookUpMeshFileKey(dataSource.id, dataLayer, meshFileName) mappingNameForMeshFile <- meshFileService.mappingNameForMeshFile(meshFileKey) diff --git a/webknossos-datastore/conf/datastore.latest.routes b/webknossos-datastore/conf/datastore.latest.routes index 0490c0c3fa0..1ca1bc825cc 100644 --- a/webknossos-datastore/conf/datastore.latest.routes +++ b/webknossos-datastore/conf/datastore.latest.routes @@ -77,10 +77,10 @@ POST /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingNa GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(datasetId: ObjectId, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(organizationId: String, datasetDirectoryName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(datasetId: ObjectId, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) +POST /datasets/:datasetId/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(datasetId: ObjectId, dataLayerName: String) # Connectome files GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String) @@ -90,14 +90,14 @@ POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerN POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) # Ad-Hoc Meshing -POST /datasets/:datasetId/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(datasetId: ObjectId, dataLayerName: String) # Segment-Index files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST 
/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String, segmentId: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(datasetId: ObjectId, dataLayerName: String, segmentId: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(datasetId: ObjectId, dataLayerName: String) # DataSource management GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(resumableChunkNumber: Int, resumableIdentifier: String) @@ -108,19 +108,19 @@ POST /datasets/reserveManualUpload POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload() POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload() GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(organizationId: String, datasetDirectoryName: Option[String]) -GET /datasets/:organizationId/:datasetDirectoryName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(organizationId: String, datasetDirectoryName: String) -PUT /datasets/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(organizationId: String, datasetDirectoryName: String) +GET /datasets/:datasetId/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(datasetId: ObjectId) +PUT /datasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(datasetId: ObjectId) POST /datasets/:organizationId/:datasetName 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(organizationId: String, datasetName: String, folderId: Option[String]) DELETE /datasets/:organizationId/:datasetDirectoryName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(organizationId: String, datasetDirectoryName: String) POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose() POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset() -DELETE /wkDatasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.invalidateCache(datasetId: ObjectId) +DELETE /datasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.invalidateCache(datasetId: ObjectId) # Actions POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking() POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(organizationId: String) -POST /triggers/reload/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetDirectoryName: String, layerName: Option[String]) +POST /triggers/reload/:organizationId/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetId: ObjectId, layerName: Option[String]) # Exports GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(jobId: String) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 73d7cdd36fc..cee5081b75b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -147,7 +147,7 @@ class TSRemoteDatastoreClient @Inject()( for { dataSourceId <- remoteWebknossosClient.getDataSourceIdForAnnotation(annotationId) dataStoreUri <- dataStoreUriWithCache(dataSourceId.organizationId, dataSourceId.directoryName) - result <- rpc( + result <- rpc( // TODO: Use datasetId s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.directoryName}/readInboxDataSource").withTokenFromContext .getWithJsonResponse[InboxDataSource] scale <- result.voxelSizeOpt.toFox ?~> "could not determine voxel size of dataset"
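Taken together, the datastore hunks above converge on one request shape: authorize against the dataset id, then resolve the data source and layer from the cache instead of the directory-keyed repository. A minimal sketch of that shape, with a hypothetical action name and the wiring (accessTokenService, datasetCache, the ~> NOT_FOUND combinator) assumed to match the DataSourceController above:

    // Sketch only: `layerSummary` is a hypothetical action; the access check and
    // cache lookup mirror the pattern repeated throughout this patch.
    def layerSummary(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] =
      Action.async { implicit request =>
        accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) {
          for {
            (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND
          } yield Ok(Json.toJson(s"${dataSource.id.directoryName}/${dataLayer.name}"))
        }
      }

The organization/directory addressing survives only in LegacyController, which keeps resolving the pair through dataSourceRepository for clients pinned to the old routes.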
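The final hunk deliberately leaves the tracingstore on the legacy organization/directory URL and records that as a TODO. Once an annotation can be resolved to its dataset id, the call could target the id-based readInboxDataSource route this patch introduces; a sketch, where getDatasetIdForAnnotation is a hypothetical counterpart to the existing getDataSourceIdForAnnotation and dataStoreUri is resolved as in the current code:

    // Hypothetical follow-up for the TODO above, not part of this patch.
    for {
      datasetId <- remoteWebknossosClient.getDatasetIdForAnnotation(annotationId) // assumed lookup
      result <- rpc(s"$dataStoreUri/data/datasets/$datasetId/readInboxDataSource").withTokenFromContext
        .getWithJsonResponse[InboxDataSource]
      scale <- result.voxelSizeOpt.toFox ?~> "could not determine voxel size of dataset"
    } yield scale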