From 11574c1dd2fe96caed6f6250a371473012f8697e Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 2 Jul 2025 11:56:37 +0200 Subject: [PATCH 1/7] WIP: stricter checks on paths in datasources --- .../datastore/controllers/DataSourceController.scala | 1 + .../webknossos/datastore/models/datasource/DataLayer.scala | 3 +++ .../webknossos/datastore/models/datasource/DataSource.scala | 1 + 3 files changed, 5 insertions(+) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 54e941b78fd..da417e6a4ab 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -392,6 +392,7 @@ class DataSourceController @Inject()( Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { + _ <- Fox.fromBool(request.body.allExplicitPaths.forall(_ => true)) // TODO check reservedAdditionalInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( uploadId = "", // Set by core backend diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 3dd8f210964..13d44726214 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -275,6 +275,9 @@ trait DataLayerLike { case _ => None } + def allExplicitPaths: Seq[String] = + magsOpt.map(_.flatMap(_.path)).orElse(wkwResolutionsOpt.map(_.flatMap(_.path))).getOrElse(Seq.empty) ++ + attachments.map(_.allAttachments.map(_.path.toString)).getOrElse(Seq.empty) } object DataLayerLike { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index ec2f87eb350..aaa24fac3ef 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -65,6 +65,7 @@ package object datasource { def withUpdatedId(newId: DataSourceId): GenericDataSource[T] = copy(id = newId) + def allExplicitPaths: Seq[String] = dataLayers.flatMap(_.allExplicitPaths) } object GenericDataSource { From 478b4aa015406ad6fbd0b5fcb14d5dcc0e699bcc Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 3 Jul 2025 11:12:49 +0200 Subject: [PATCH 2/7] WIP path checks --- .../datastore/controllers/DataSourceController.scala | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index da417e6a4ab..68d80af0b3e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -33,6 +33,7 
@@ import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} import java.io.File import java.net.URI +import java.nio.file.Path import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} @@ -387,6 +388,15 @@ class DataSourceController @Inject()( } } + private def pathIsLocal(pathLiteral: String) = { + val uri = new URI(pathLiteral) + uri.getScheme == null || uri.getScheme == DataVaultService.schemeFile + } + + private def pathIsDataSourceLocal(pathLiteral: String) = + pathIsLocal(pathLiteral) && Path.of(pathLiteral).normalize() + private def pathMatchesGlobalCredentials(path: String) = ??? + // Stores a remote dataset in the database. def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => From a30e824af691debcfb0708f4edc9ae4c82d564d0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 3 Jul 2025 11:39:00 +0200 Subject: [PATCH 3/7] test local, datasourcelocal, matchesGlobalCredentials --- .../controllers/DataSourceController.scala | 11 +------- .../RemoteSourceDescriptorService.scala | 27 ++++++++++++++----- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 69b3053333f..8f1411e8f8c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -35,7 +35,7 @@ import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} import java.io.File import java.net.URI -import java.nio.file.Path +import java.nio.file.{Path, Paths} import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} @@ -385,15 +385,6 @@ class DataSourceController @Inject()( } } - private def pathIsLocal(pathLiteral: String) = { - val uri = new URI(pathLiteral) - uri.getScheme == null || uri.getScheme == DataVaultService.schemeFile - } - - private def pathIsDataSourceLocal(pathLiteral: String) = - pathIsLocal(pathLiteral) && Path.of(pathLiteral).normalize() - private def pathMatchesGlobalCredentials(path: String) = ??? - // Stores a remote dataset in the database. 
def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index 758a8bbe41f..4b45ca78da4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -126,11 +126,8 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote res } - private def findGlobalCredentialFor(pathOpt: Option[String])(implicit ec: ExecutionContext) = - pathOpt match { - case Some(magPath) => globalCredentials.find(c => magPath.startsWith(c.name)).toFox - case None => Fox.empty - } + private def findGlobalCredentialFor(pathOpt: Option[String]): Option[DataVaultCredential] = + pathOpt.flatMap(path => globalCredentials.find(c => path.startsWith(c.name))) private def credentialFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[DataVaultCredential] = magLocator.credentialId match { @@ -139,7 +136,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote case None => magLocator.credentials match { case Some(credential) => Fox.successful(credential) - case None => findGlobalCredentialFor(magLocator.path) + case None => findGlobalCredentialFor(magLocator.path).toFox } } @@ -148,6 +145,22 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote case Some(credentialId) => dSRemoteWebknossosClient.getCredential(credentialId) case None => - findGlobalCredentialFor(Some(attachment.path.toString)) + findGlobalCredentialFor(Some(attachment.path.toString)).toFox } + + private def pathIsLocal(pathLiteral: String): Boolean = { + val uri = new URI(pathLiteral) + uri.getScheme == null || uri.getScheme == DataVaultService.schemeFile + } + + private def pathIsDataSourceLocal(pathLiteral: String): Boolean = + pathIsLocal(pathLiteral) && { + val path = Path.of(pathLiteral) + val workingDir = Path.of(".").toAbsolutePath.normalize + val inWorkingDir = workingDir.resolve(path).toAbsolutePath.normalize + !path.isAbsolute && inWorkingDir.startsWith(workingDir) + } + + private def pathMatchesGlobalCredentials(pathLiteral: String): Boolean = + findGlobalCredentialFor(Some(pathLiteral)).isDefined } From c8af42a59b1bba595b72968cb7985c605a6877cd Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 3 Jul 2025 11:46:23 +0200 Subject: [PATCH 4/7] use check in add route --- conf/messages | 1 + .../datastore/controllers/DataSourceController.scala | 8 +++++--- .../datastore/storage/RemoteSourceDescriptorService.scala | 6 ++++++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/conf/messages b/conf/messages index 09231959e10..b9fdc3443f2 100644 --- a/conf/messages +++ b/conf/messages @@ -126,6 +126,7 @@ dataVault.setup.failed=Failed to set up remote file system dataVault.getPath.failed=Failed to get remote path dataSource.notFound=Datasource not found on datastore server. Might still be initializing. +dataSource.add.pathsNotAllowed=Cannot directly add a datasource with local paths that leave the dataset, or with paths that match the WEBKNOSSOS object storage. dataStore.list.failed=Failed to retrieve list of data stores. 
dataStore.notFound=DataStore not found. diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 8f1411e8f8c..b3abe551985 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -22,7 +22,7 @@ import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.mesh.{MeshFileService, MeshMappingHelper} import com.scalableminds.webknossos.datastore.services.segmentindex.SegmentIndexFileService import com.scalableminds.webknossos.datastore.services.uploading._ -import com.scalableminds.webknossos.datastore.storage.DataVaultService +import com.scalableminds.webknossos.datastore.storage.{DataVaultService, RemoteSourceDescriptorService} import com.scalableminds.util.tools.Box.tryo import com.scalableminds.util.tools.{Box, Empty, Failure, Full} import com.scalableminds.webknossos.datastore.services.mapping.AgglomerateService @@ -35,7 +35,6 @@ import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} import java.io.File import java.net.URI -import java.nio.file.{Path, Paths} import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} @@ -51,6 +50,7 @@ class DataSourceController @Inject()( agglomerateService: AgglomerateService, storageUsageService: DSUsedStorageService, datasetErrorLoggingService: DSDatasetErrorLoggingService, + remoteSourceDescriptorService: RemoteSourceDescriptorService, exploreRemoteLayerService: ExploreRemoteLayerService, uploadService: UploadService, composeService: ComposeService, @@ -390,7 +390,9 @@ class DataSourceController @Inject()( Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { - _ <- Fox.fromBool(request.body.allExplicitPaths.forall(_ => true)) // TODO check + _ <- Fox.fromBool( + request.body.allExplicitPaths + .forall(remoteSourceDescriptorService.pathIsAllowedToAddDirectly)) ?~> "dataSource.add.pathsNotAllowed" reservedAdditionalInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( uploadId = "", // Set by core backend diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index 4b45ca78da4..e368ed3da32 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -148,6 +148,12 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote findGlobalCredentialFor(Some(attachment.path.toString)).toFox } + def pathIsAllowedToAddDirectly(pathLiteral: String): Boolean = + if (pathIsLocal(pathLiteral)) + pathIsDataSourceLocal(pathLiteral) + else + !pathMatchesGlobalCredentials(pathLiteral) + private def pathIsLocal(pathLiteral: String): Boolean = { val uri = new URI(pathLiteral) uri.getScheme == null || uri.getScheme == DataVaultService.schemeFile From 8f9cd16fb48444aa6dcd4f88262054024420c577 Mon 
Sep 17 00:00:00 2001 From: Florian M Date: Thu, 3 Jul 2025 11:56:29 +0200 Subject: [PATCH 5/7] replace Paths.get by newer Path.of --- test/e2e/End2EndSpec.scala | 4 ++-- .../com/scalableminds/util/io/PathUtils.scala | 10 +++++----- .../com/scalableminds/util/io/ZipIO.scala | 18 +++++++++--------- .../controllers/ExportsController.scala | 4 ++-- .../datavault/FileSystemDataVault.scala | 4 ++-- .../services/BinaryDataServiceHolder.scala | 4 ++-- .../services/ConnectomeFileService.scala | 6 +++--- .../services/DSUsedStorageService.scala | 4 ++-- .../datastore/services/DataSourceService.scala | 10 +++++----- .../services/mapping/AgglomerateService.scala | 4 ++-- .../services/mapping/MappingService.scala | 4 ++-- .../services/mesh/Hdf5MeshFileService.scala | 4 ++-- .../services/mesh/MeshFileService.scala | 4 ++-- .../Hdf5SegmentIndexFileService.scala | 4 ++-- .../segmentindex/SegmentIndexFileService.scala | 4 ++-- .../RemoteSourceDescriptorService.scala | 6 +++--- .../tracingstore/files/TempFileService.scala | 4 ++-- .../EditableMappingService.scala | 4 ++-- .../tracings/volume/VolumeTracingService.scala | 4 ++-- 19 files changed, 53 insertions(+), 53 deletions(-) diff --git a/test/e2e/End2EndSpec.scala b/test/e2e/End2EndSpec.scala index a6572ed0f36..9dfe9e9a898 100644 --- a/test/e2e/End2EndSpec.scala +++ b/test/e2e/End2EndSpec.scala @@ -10,7 +10,7 @@ import play.api.libs.ws.{WSClient, WSResponse} import play.api.test.WithServer import java.io.File -import java.nio.file.Paths +import java.nio.file.Path import scala.concurrent.Await import scala.concurrent.duration._ import scala.sys.process._ @@ -64,7 +64,7 @@ class End2EndSpec(arguments: Arguments) extends Specification with GuiceFakeAppl if (!dataDirectory.listFiles().exists(_.getName == "test-dataset")) ZipIO.unzipToDirectory( testDatasetZip, - Paths.get(dataDirectory.toPath.toString, "test-dataset"), + Path.of(dataDirectory.toPath.toString, "test-dataset"), includeHiddenFiles = true, hiddenFilesWhitelist = List(), truncateCommonPrefix = true, diff --git a/util/src/main/scala/com/scalableminds/util/io/PathUtils.scala b/util/src/main/scala/com/scalableminds/util/io/PathUtils.scala index a7bea38f9b4..264739a12be 100644 --- a/util/src/main/scala/com/scalableminds/util/io/PathUtils.scala +++ b/util/src/main/scala/com/scalableminds/util/io/PathUtils.scala @@ -1,7 +1,7 @@ package com.scalableminds.util.io import java.io.File -import java.nio.file.{Path, _} +import java.nio.file._ import com.typesafe.scalalogging.LazyLogging import com.scalableminds.util.tools.Box.tryo import com.scalableminds.util.tools.{Box, Failure, Full} @@ -42,7 +42,7 @@ trait PathUtils extends LazyLogging { val elements = p1.iterator.asScala.zip(p2.iterator.asScala).takeWhile(Function.tupled(_ == _)).map(_._1) val joined = elements.mkString("/") val absoluteIfNeeded = if (p1.startsWith("/")) f"/$joined" else joined - Paths.get(absoluteIfNeeded) + Path.of(absoluteIfNeeded) } def commonPrefix(ps: List[Path]): Path = @@ -162,14 +162,14 @@ trait PathUtils extends LazyLogging { lastCutOffIndex match { case -1 => path // subpath(0, 0) is forbidden, therefore we handle this special case ourselves - case 0 => Paths.get("") + case 0 => Path.of("") case i => path.subpath(0, i) } } // Remove a single file name from previously computed common prefix def removeSingleFileNameFromPrefix(prefix: Path, fileNames: List[String]): Path = { - def isFileNameInPrefix(prefix: Path, fileName: String) = prefix.endsWith(Paths.get(fileName).getFileName) + def isFileNameInPrefix(prefix: Path, fileName: 
String) = prefix.endsWith(Path.of(fileName).getFileName) fileNames match { case head :: tail if tail.isEmpty && isFileNameInPrefix(prefix, head) => @@ -180,7 +180,7 @@ trait PathUtils extends LazyLogging { private def removeOneName(path: Path): Path = if (path.getNameCount == 1) { - Paths.get("") + Path.of("") } else path.getParent def deleteDirectoryRecursively(path: Path): Box[Unit] = { diff --git a/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala b/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala index 56e8965aba0..8a62df9d1b9 100644 --- a/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala +++ b/util/src/main/scala/com/scalableminds/util/io/ZipIO.scala @@ -1,7 +1,7 @@ package com.scalableminds.util.io import java.io._ -import java.nio.file.{Files, Path, Paths} +import java.nio.file.{Files, Path} import java.util.zip.{GZIPOutputStream => DefaultGZIPOutputStream, _} import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.typesafe.scalalogging.LazyLogging @@ -178,23 +178,23 @@ object ZipIO extends LazyLogging with FoxImplicits { val zipEntries = zip.entries.asScala.filter { e: ZipEntry => !e.isDirectory && (includeHiddenFiles || !isFileHidden(e) || hiddenFilesWhitelist.contains( - Paths.get(e.getName).getFileName.toString)) + Path.of(e.getName).getFileName.toString)) }.toList val commonPrefix = if (truncateCommonPrefix) { - val commonPrefixNotFixed = PathUtils.commonPrefix(zipEntries.map(e => Paths.get(e.getName))) + val commonPrefixNotFixed = PathUtils.commonPrefix(zipEntries.map(e => Path.of(e.getName))) val strippedPrefix = PathUtils.cutOffPathAtLastOccurrenceOf(commonPrefixNotFixed, excludeFromPrefix.getOrElse(List.empty)) PathUtils.removeSingleFileNameFromPrefix(strippedPrefix, zipEntries.map(_.getName)) } else { - Paths.get("") + Path.of("") } val resultFox = zipEntries.foldLeft[Fox[List[A]]](Fox.successful(List.empty)) { (results, entry) => results.shiftBox.map { case Full(rs) => val input: InputStream = zip.getInputStream(entry) - val path = commonPrefix.relativize(Paths.get(entry.getName)) + val path = commonPrefix.relativize(Path.of(entry.getName)) val innerResultFox: Fox[List[A]] = Fox.fromFutureBox(f(path, input).futureBox.map { case Full(result) => input.close() @@ -230,16 +230,16 @@ object ZipIO extends LazyLogging with FoxImplicits { val zipEntries = zip.entries.asScala.filter { e: ZipEntry => !e.isDirectory && (includeHiddenFiles || !isFileHidden(e) || hiddenFilesWhitelist.contains( - Paths.get(e.getName).getFileName.toString)) + Path.of(e.getName).getFileName.toString)) }.toList val commonPrefix = if (truncateCommonPrefix) { - val commonPrefixNotFixed = PathUtils.commonPrefix(zipEntries.map(e => Paths.get(e.getName))) + val commonPrefixNotFixed = PathUtils.commonPrefix(zipEntries.map(e => Path.of(e.getName))) val strippedPrefix = PathUtils.cutOffPathAtLastOccurrenceOf(commonPrefixNotFixed, excludeFromPrefix.getOrElse(List.empty)) PathUtils.removeSingleFileNameFromPrefix(strippedPrefix, zipEntries.map(_.getName)) } else { - Paths.get("") + Path.of("") } val result = zipEntries.foldLeft[Box[List[A]]](Full(Nil)) { (results, entry) => @@ -248,7 +248,7 @@ object ZipIO extends LazyLogging with FoxImplicits { var input: InputStream = null try { input = zip.getInputStream(entry) - val path = commonPrefix.relativize(Paths.get(entry.getName)) + val path = commonPrefix.relativize(Path.of(entry.getName)) val r = f(path, input) match { case Full(result) => Full(rs :+ result) diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index f348fa289c5..d1920922227 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -1,6 +1,6 @@ package com.scalableminds.webknossos.datastore.controllers -import java.nio.file.{Files, Path, Paths} +import java.nio.file.{Files, Path} import com.google.inject.Inject import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -30,7 +30,7 @@ class ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, extends Controller with FoxImplicits { - private val dataBaseDir: Path = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir: Path = Path.of(config.Datastore.baseDirectory) override def allowRemoteOrigin: Boolean = true diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala index 85a05da9162..9304fb16758 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala @@ -8,7 +8,7 @@ import org.apache.commons.lang3.builder.HashCodeBuilder import java.nio.ByteBuffer import java.nio.channels.{AsynchronousFileChannel, CompletionHandler} -import java.nio.file.{Files, Path, Paths, StandardOpenOption} +import java.nio.file.{Files, Path, StandardOpenOption} import java.util.stream.Collectors import scala.concurrent.{ExecutionContext, Promise} import scala.jdk.CollectionConverters._ @@ -96,7 +96,7 @@ class FileSystemDataVault extends DataVault { for { _ <- Fox.fromBool(uri.getScheme == DataVaultService.schemeFile) ?~> "trying to read from FileSystemDataVault, but uri scheme is not file" _ <- Fox.fromBool(uri.getHost == null || uri.getHost.isEmpty) ?~> s"trying to read from FileSystemDataVault, but hostname ${uri.getHost} is non-empty" - localPath = Paths.get(uri.getPath) + localPath = Path.of(uri.getPath) _ <- Fox.fromBool(localPath.isAbsolute) ?~> "trying to read from FileSystemDataVault, but hostname is non-empty" } yield localPath } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala index 813fd5bc347..ce1e609c604 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataServiceHolder.scala @@ -1,6 +1,6 @@ package com.scalableminds.webknossos.datastore.services -import java.nio.file.Paths +import java.nio.file.Path import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.services.mapping.AgglomerateService import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService @@ -23,7 +23,7 @@ class BinaryDataServiceHolder @Inject()(config: DataStoreConfig, agglomerateService: AgglomerateService)(implicit ec: ExecutionContext) { val binaryDataService: 
BinaryDataService = new BinaryDataService( - Paths.get(config.Datastore.baseDirectory), + Path.of(config.Datastore.baseDirectory), Some(agglomerateService), Some(remoteSourceDescriptorService), Some(chunkCacheService.sharedChunkContentsCache), diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala index 3a24c199568..682d8b7a918 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala @@ -1,7 +1,7 @@ package com.scalableminds.webknossos.datastore.services import java.io.File -import java.nio.file.{Path, Paths} +import java.nio.file.Path import com.scalableminds.util.io.PathUtils import com.scalableminds.util.tools.{Fox, JsonHelper, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -84,7 +84,7 @@ class ConnectomeFileService @Inject()(config: DataStoreConfig)(implicit ec: Exec extends FoxImplicits with LazyLogging { - private val dataBaseDir = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir = Path.of(config.Datastore.baseDirectory) private val connectomesDir = "connectomes" private val connectomeFileExtension = "hdf5" @@ -253,7 +253,7 @@ class ConnectomeFileService @Inject()(config: DataStoreConfig)(implicit ec: Exec } yield SynapseTypesWithLegend(synapseTypes, typeNames) private def typeNamesForSynapsesOrEmpty(connectomeFilePath: Path): List[String] = { - val typeNamesPath = Paths.get(s"${connectomeFilePath.toString.dropRight(connectomeFileExtension.length)}json") + val typeNamesPath = Path.of(s"${connectomeFilePath.toString.dropRight(connectomeFileExtension.length)}json") if (new File(typeNamesPath.toString).exists()) { JsonHelper.parseFromFileAs[ConnectomeLegend](typeNamesPath, typeNamesPath.getParent) match { case Full(connectomeLegend) => connectomeLegend.synapse_type_names diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSUsedStorageService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSUsedStorageService.scala index d7e208efc22..d877424f042 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSUsedStorageService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSUsedStorageService.scala @@ -10,7 +10,7 @@ import com.scalableminds.util.tools.Box.tryo import org.apache.commons.io.FileUtils import play.api.libs.json.{Json, OFormat} -import java.nio.file.{Files, Path, Paths} +import java.nio.file.{Files, Path} import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -30,7 +30,7 @@ class DSUsedStorageService @Inject()(config: DataStoreConfig)(implicit ec: Execu extends FoxImplicits with LazyLogging { - private val baseDir: Path = Paths.get(config.Datastore.baseDirectory) + private val baseDir: Path = Path.of(config.Datastore.baseDirectory) private def noSymlinksFilter(p: Path) = !Files.isSymbolicLink(p) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index 2fcc38e2641..bb37c5f2f6b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -22,7 +22,7 @@ import play.api.libs.json.Json import java.io.{File, FileWriter} import java.net.URI -import java.nio.file.{Files, Path, Paths} +import java.nio.file.{Files, Path} import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.io.Source @@ -46,10 +46,10 @@ class DataSourceService @Inject()( override protected def tickerInitialDelay: FiniteDuration = config.Datastore.WatchFileSystem.initialDelay - val dataBaseDir: Path = Paths.get(config.Datastore.baseDirectory) + val dataBaseDir: Path = Path.of(config.Datastore.baseDirectory) - private val propertiesFileName = Paths.get(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) - private val logFileName = Paths.get("datasource-properties-backups.log") + private val propertiesFileName = Path.of(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) + private val logFileName = Path.of("datasource-properties-backups.log") private var inboxCheckVerboseCounter = 0 @@ -138,7 +138,7 @@ class DataSourceService @Inject()( if (isRemote) { MagPathInfo(dataLayer.name, mag.mag, magURI.toString, magURI.toString, hasLocalData = false) } else { - val magPath = Paths.get(magURI) + val magPath = Path.of(magURI) val realPath = magPath.toRealPath() // Does this dataset have local data, i.e. the data that is referenced by the mag path is within the dataset directory val isLocal = realPath.startsWith(datasetPath.toAbsolutePath) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/AgglomerateService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/AgglomerateService.scala index 83129eef903..2ec1264e947 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/AgglomerateService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/AgglomerateService.scala @@ -21,7 +21,7 @@ import com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, Remot import com.typesafe.scalalogging.LazyLogging import org.apache.commons.io.FilenameUtils -import java.nio.file.Paths +import java.nio.file.Path import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt @@ -34,7 +34,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig, with FoxImplicits { private val localAgglomeratesDir = "agglomerates" private val hdf5AgglomerateFileExtension = "hdf5" - private val dataBaseDir = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir = Path.of(config.Datastore.baseDirectory) private val agglomerateFileKeyCache : AlfuCache[(DataSourceId, String, String), AgglomerateFileKey] = AlfuCache() // dataSourceId, layerName, mappingName → AgglomerateFileKey diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/MappingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/MappingService.scala index c93f456f54b..0234fb49bc5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/MappingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mapping/MappingService.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.datastore.models.requests.{DataServiceMappin import com.scalableminds.webknossos.datastore.storage.ParsedMappingCache import 
com.typesafe.scalalogging.LazyLogging -import java.nio.file.Paths +import java.nio.file.Path import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.reflect.ClassTag @@ -20,7 +20,7 @@ class MappingService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionCo def handleMappingRequest(request: DataServiceMappingRequest): Fox[Array[Byte]] = { val readInstruction = - MappingReadInstruction(Paths.get(config.Datastore.baseDirectory), + MappingReadInstruction(Path.of(config.Datastore.baseDirectory), request.dataSourceIdOrVolumeDummy, request.mapping) request.dataLayer.mappingProvider.load(readInstruction).toFox diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/Hdf5MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/Hdf5MeshFileService.scala index 6bf04a98c81..a2bc2c505eb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/Hdf5MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/Hdf5MeshFileService.scala @@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.storage.{CachedHdf5File, Hdf5FileC import jakarta.inject.Inject import play.api.i18n.{Messages, MessagesProvider} -import java.nio.file.Paths +import java.nio.file.Path import scala.concurrent.ExecutionContext class Hdf5MeshFileService @Inject()(config: DataStoreConfig) @@ -17,7 +17,7 @@ class Hdf5MeshFileService @Inject()(config: DataStoreConfig) with MeshFileUtils with FoxImplicits { - private val dataBaseDir = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir = Path.of(config.Datastore.baseDirectory) private lazy val fileHandleCache = new Hdf5FileCache(30) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/MeshFileService.scala index 2d8a9c044cb..f30cf483f5a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/MeshFileService.scala @@ -19,7 +19,7 @@ import org.apache.commons.io.FilenameUtils import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{Json, OFormat} -import java.nio.file.Paths +import java.nio.file.Path import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -72,7 +72,7 @@ class MeshFileService @Inject()(config: DataStoreConfig, extends FoxImplicits with ArrayArtifactHashing { - private val dataBaseDir = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir = Path.of(config.Datastore.baseDirectory) private val localMeshesDir = "meshes" private val hdf5MeshFileExtension = "hdf5" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/Hdf5SegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/Hdf5SegmentIndexFileService.scala index baab0ba0c89..3c46e3b9232 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/Hdf5SegmentIndexFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/Hdf5SegmentIndexFileService.scala @@ -7,13 +7,13 @@ import com.scalableminds.webknossos.datastore.DataStoreConfig import 
com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.storage.{CachedHdf5File, Hdf5FileCache} -import java.nio.file.Paths +import java.nio.file.Path import javax.inject.Inject import scala.concurrent.ExecutionContext class Hdf5SegmentIndexFileService @Inject()(config: DataStoreConfig) extends FoxImplicits with SegmentIndexFileUtils { - private val dataBaseDir = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir = Path.of(config.Datastore.baseDirectory) private lazy val fileHandleCache = new Hdf5FileCache(100) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/SegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/SegmentIndexFileService.scala index f63e66d6ceb..f2e5c0607cc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/SegmentIndexFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/segmentindex/SegmentIndexFileService.scala @@ -26,7 +26,7 @@ import com.scalableminds.webknossos.datastore.services.mapping.AgglomerateServic import com.scalableminds.webknossos.datastore.services.{ArrayArtifactHashing, BinaryDataServiceHolder} import com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, RemoteSourceDescriptorService} -import java.nio.file.{Path, Paths} +import java.nio.file.Path import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -41,7 +41,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, extends FoxImplicits with ArrayArtifactHashing with SegmentStatistics { - private val dataBaseDir = Paths.get(config.Datastore.baseDirectory) + private val dataBaseDir = Path.of(config.Datastore.baseDirectory) private val localSegmentIndexDir = "segmentIndex" private val hdf5SegmentIndexFileExtension = "hdf5" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index e368ed3da32..ca006de6fb9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -11,7 +11,7 @@ import com.scalableminds.util.tools.Box import com.scalableminds.util.tools.Box.tryo import java.net.URI -import java.nio.file.{Path, Paths} +import java.nio.file.Path import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -68,7 +68,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote if (DataVaultService.isRemoteScheme(uri.getScheme)) { uri } else if (uri.getScheme == null || uri.getScheme == DataVaultService.schemeFile) { - val localPath = Paths.get(uri.getPath) + val localPath = Path.of(uri.getPath) if (localPath.isAbsolute) { if (localPath.toString.startsWith(localDatasetDir.getParent.toAbsolutePath.toString) || dataStoreConfig.Datastore.localDirectoryWhitelist .exists(whitelistEntry => localPath.toString.startsWith(whitelistEntry))) @@ -114,7 +114,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote if (DataVaultService.isRemoteScheme(uri.getScheme)) { uri } else { - Paths.get(uri.getPath).toAbsolutePath.toUri + Path.of(uri.getPath).toAbsolutePath.toUri } } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala index 5b73ce5ae3c..d9da63f02bf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/files/TempFileService.scala @@ -7,7 +7,7 @@ import com.typesafe.scalalogging.LazyLogging import com.scalableminds.util.tools.Box.tryo import org.apache.commons.io.FileUtils -import java.nio.file.{Files, Path, Paths} +import java.nio.file.{Files, Path} import scala.concurrent.ExecutionContext import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.util.Random @@ -21,7 +21,7 @@ trait TempFileService extends LazyLogging { implicit protected def ec: ExecutionContext protected def moduleName: String - private val tmpDir: Path = Paths.get(System.getProperty("java.io.tmpdir")).resolve(s"${moduleName}-tempfiles") + private val tmpDir: Path = Path.of(System.getProperty("java.io.tmpdir")).resolve(s"$moduleName-tempfiles") private val activeTempFiles = scala.collection.mutable.Set[(Path, Instant)]() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 4991972d234..405205961fd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -41,7 +41,7 @@ import org.jgrapht.alg.flow.PushRelabelMFImpl import org.jgrapht.graph.{DefaultWeightedEdge, SimpleWeightedGraph} import play.api.libs.json.{JsObject, Json, OFormat} -import java.nio.file.Paths +import java.nio.file.Path import java.util import scala.concurrent.ExecutionContext import scala.concurrent.duration._ @@ -107,7 +107,7 @@ class EditableMappingService @Inject()( val defaultSegmentToAgglomerateChunkSize: Int = 64 * 1024 // max. 
1 MiB chunks (two 8-byte numbers per element) - private val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, datasetErrorLoggingService) + private val binaryDataService = new BinaryDataService(Path.of(""), None, None, None, datasetErrorLoggingService) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index c4dbb3335e0..4b851463fb0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -30,7 +30,7 @@ import com.scalableminds.util.tools.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import java.io._ -import java.nio.file.{Path, Paths} +import java.nio.file.Path import java.util.Base64 import java.util.zip.Deflater import scala.collection.mutable @@ -72,7 +72,7 @@ class VolumeTracingService @Inject()( /* We want to reuse the bucket loading methods from binaryDataService for the volume tracings, however, it does not actually load anything from disk, unlike its “normal” instance in the datastore (only from the volume tracing store) */ - private val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, datasetErrorLoggingService) + private val binaryDataService = new BinaryDataService(Path.of(""), None, None, None, datasetErrorLoggingService) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService From 91bc2407cfe205b716519a29e78fc67b24a0877e Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 3 Jul 2025 13:39:04 +0200 Subject: [PATCH 6/7] same checks for upload --- .../services/uploading/UploadService.scala | 16 ++++++++++++++-- .../storage/RemoteSourceDescriptorService.scala | 7 ++++++- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 0414fa469c7..b400ca88db9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -22,7 +22,7 @@ import com.scalableminds.webknossos.datastore.services.{ DataSourceRepository, DataSourceService } -import com.scalableminds.webknossos.datastore.storage.DataStoreRedisStore +import com.scalableminds.webknossos.datastore.storage.{DataStoreRedisStore, RemoteSourceDescriptorService} import com.typesafe.scalalogging.LazyLogging import com.scalableminds.util.tools.Box.tryo import com.scalableminds.util.tools._ @@ -113,6 +113,7 @@ object CancelUploadInformation { class UploadService @Inject()(dataSourceRepository: DataSourceRepository, dataSourceService: DataSourceService, runningUploadMetadataStore: DataStoreRedisStore, + remoteSourceDescriptorService: RemoteSourceDescriptorService, exploreLocalLayerService: 
ExploreLocalLayerService, datasetSymlinkService: DatasetSymlinkService, val remoteWebknossosClient: DSRemoteWebknossosClient)(implicit ec: ExecutionContext) @@ -385,7 +386,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, case UploadedDataSourceType.ZARR | UploadedDataSourceType.NEUROGLANCER_PRECOMPUTED | UploadedDataSourceType.N5_MULTISCALES | UploadedDataSourceType.N5_ARRAY => exploreLocalDatasource(unpackToDir, dataSourceId, uploadedDataSourceType) - case UploadedDataSourceType.EXPLORED => Fox.successful(()) + case UploadedDataSourceType.EXPLORED => + checkPathsInUploadedDatasourcePropertiesJson(unpackToDir, dataSourceId.organizationId) case UploadedDataSourceType.ZARR_MULTILAYER | UploadedDataSourceType.NEUROGLANCER_MULTILAYER | UploadedDataSourceType.N5_MULTILAYER => tryExploringMultipleLayers(unpackToDir, dataSourceId, uploadedDataSourceType) @@ -398,6 +400,16 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } yield () } + private def checkPathsInUploadedDatasourcePropertiesJson(unpackToDir: Path, organizationId: String): Fox[Unit] = { + val dataSource = dataSourceService.dataSourceFromDir(unpackToDir, organizationId) + for { + _ <- Fox.runOptional(dataSource.toUsable)( + usableDataSource => + Fox.fromBool( + usableDataSource.allExplicitPaths.forall(remoteSourceDescriptorService.pathIsAllowedToAddDirectly))) + } yield () + } + private def exploreLocalDatasource(path: Path, dataSourceId: DataSourceId, typ: UploadedDataSourceType.Value): Fox[Unit] = diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index ca006de6fb9..b4b769e32a1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -150,7 +150,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote def pathIsAllowedToAddDirectly(pathLiteral: String): Boolean = if (pathIsLocal(pathLiteral)) - pathIsDataSourceLocal(pathLiteral) + pathIsDataSourceLocal(pathLiteral) || pathIsInLocalDirectoryWhitelist(pathLiteral) else !pathMatchesGlobalCredentials(pathLiteral) @@ -169,4 +169,9 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote private def pathMatchesGlobalCredentials(pathLiteral: String): Boolean = findGlobalCredentialFor(Some(pathLiteral)).isDefined + + private def pathIsInLocalDirectoryWhitelist(pathLiteral: String): Boolean = + pathIsLocal(pathLiteral) && + dataStoreConfig.Datastore.localDirectoryWhitelist.exists(whitelistEntry => pathLiteral.startsWith(whitelistEntry)) + } From 09ccffb3c640d2389521f5b2ddc753fded3ea4fc Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 3 Jul 2025 13:55:43 +0200 Subject: [PATCH 7/7] assert no additional paths in datasource update --- conf/messages | 1 + .../controllers/DataSourceController.scala | 8 ++++++-- .../services/DataSourceService.scala | 20 ++++++++++++++++++- .../services/uploading/UploadService.scala | 4 +++- 4 files changed, 29 insertions(+), 4 deletions(-) diff --git a/conf/messages b/conf/messages index b9fdc3443f2..2821bef974c 100644 --- a/conf/messages +++ b/conf/messages @@ -127,6 +127,7 @@ dataVault.getPath.failed=Failed to get remote path dataSource.notFound=Datasource not found on datastore server. 
Might still be initializing. dataSource.add.pathsNotAllowed=Cannot directly add a datasource with local paths that leave the dataset, or with paths that match the WEBKNOSSOS object storage. +dataSource.update.newExplicitPaths=Cannot update a dataset with new explicit paths. To add mags or layers, please use the compose functionality. dataStore.list.failed=Failed to retrieve list of data stores. dataStore.notFound=DataStore not found. diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index b3abe551985..e1f947eed80 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -380,7 +380,9 @@ class DataSourceController @Inject()( for { dataSource <- dataSourceRepository.get(DataSourceId(datasetDirectoryName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND - _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true) + _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), + expectExisting = true, + preventNewPaths = true) } yield Ok } } @@ -408,7 +410,9 @@ class DataSourceController @Inject()( ) ) ?~> "dataset.upload.validation.failed" datasourceId = DataSourceId(reservedAdditionalInfo.directoryName, organizationId) - _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), expectExisting = false) + _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), + expectExisting = false, + preventNewPaths = false) uploadedDatasetId <- dsRemoteWebknossosClient.reportUpload(datasourceId, 0L, needsConversion = false, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index bb37c5f2f6b..3837e1ec902 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -269,7 +269,7 @@ class DataSourceService @Inject()( } } - def updateDataSource(dataSource: DataSource, expectExisting: Boolean): Fox[Unit] = { + def updateDataSource(dataSource: DataSource, expectExisting: Boolean, preventNewPaths: Boolean): Fox[Unit] = { val organizationDir = dataBaseDir.resolve(dataSource.id.organizationId) val dataSourcePath = organizationDir.resolve(dataSource.id.directoryName) for { @@ -277,12 +277,30 @@ class DataSourceService @Inject()( propertiesFile = dataSourcePath.resolve(propertiesFileName) _ <- Fox.runIf(!expectExisting)(ensureDirectoryBox(dataSourcePath).toFox) _ <- Fox.runIf(!expectExisting)(Fox.fromBool(!Files.exists(propertiesFile))) ?~> "dataSource.alreadyPresent" + _ <- Fox.runIf(expectExisting && preventNewPaths)(assertNoNewPaths(dataSourcePath, dataSource)) ?~> "dataSource.update.newExplicitPaths" _ <- Fox.runIf(expectExisting)(backupPreviousProperties(dataSourcePath).toFox) ?~> "Could not update datasource-properties.json" _ <- JsonHelper.writeToFile(propertiesFile, dataSource).toFox ?~> "Could not update datasource-properties.json" _ <- dataSourceRepository.updateDataSource(dataSource) } yield () } + private def 
assertNoNewPaths(dataSourcePath: Path, newDataSource: DataSource): Fox[Unit] = { + val propertiesPath = dataSourcePath.resolve(propertiesFileName) + if (Files.exists(propertiesPath)) { + Fox + .runOptional(newDataSource.toUsable) { newUsableDataSource => + Fox.runOptional(dataSourceFromDir(dataSourcePath, newDataSource.id.organizationId).toUsable) { + oldUsableDataSource => + val oldPaths = oldUsableDataSource.allExplicitPaths.toSet + Fox.fromBool(newUsableDataSource.allExplicitPaths.forall(oldPaths.contains)) + } + } + .map(_ => ()) + } else { + Fox.successful(()) + } + } + private def backupPreviousProperties(dataSourcePath: Path): Box[Unit] = { val propertiesFile = dataSourcePath.resolve(propertiesFileName) val previousContentOrEmpty = if (Files.exists(propertiesFile)) { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index b400ca88db9..0a6ddec37a1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -507,7 +507,9 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, dataSourceUsable <- dataSource.toUsable.toFox ?~> "Uploaded dataset has no valid properties file, cannot link layers" layers <- Fox.serialCombined(layersToLink)(layerFromIdentifier) dataSourceWithLinkedLayers = dataSourceUsable.copy(dataLayers = dataSourceUsable.dataLayers ::: layers) - _ <- dataSourceService.updateDataSource(dataSourceWithLinkedLayers, expectExisting = true) ?~> "Could not write combined properties file" + _ <- dataSourceService.updateDataSource(dataSourceWithLinkedLayers, + expectExisting = true, + preventNewPaths = false) ?~> "Could not write combined properties file" } yield () }
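
Taken together, patches 2, 3, 4 and 6 converge on a single predicate in RemoteSourceDescriptorService: pathIsAllowedToAddDirectly accepts a local path only if it stays inside the dataset directory (or sits in the configured localDirectoryWhitelist), and accepts a remote path only if it does not match one of the datastore's global credentials, in line with the new dataSource.add.pathsNotAllowed message. The following self-contained Scala sketch mirrors that logic for illustration only; the scheme literal "file" stands in for DataVaultService.schemeFile, and the credential name "s3://webknossos-bucket/" and whitelist entry "/srv/shared-datasets" are hypothetical placeholders for the injected global credentials and the Datastore.localDirectoryWhitelist config.

import java.net.URI
import java.nio.file.Path

object PathCheckSketch {

  // Hypothetical stand-ins for the injected global credentials and config whitelist:
  val globalCredentialNames: Seq[String] = Seq("s3://webknossos-bucket/")
  val localDirectoryWhitelist: Seq[String] = Seq("/srv/shared-datasets")

  // A path counts as local if it has no URI scheme or the file scheme.
  private def pathIsLocal(pathLiteral: String): Boolean = {
    val scheme = new URI(pathLiteral).getScheme
    scheme == null || scheme == "file"
  }

  // Local paths must be relative and, once resolved and normalized against the
  // base (dataset) directory, must not escape it via "..".
  private def pathIsDataSourceLocal(pathLiteral: String): Boolean =
    pathIsLocal(pathLiteral) && {
      val path = Path.of(pathLiteral)
      val baseDir = Path.of(".").toAbsolutePath.normalize
      val resolved = baseDir.resolve(path).toAbsolutePath.normalize
      !path.isAbsolute && resolved.startsWith(baseDir)
    }

  // A remote path is rejected if it falls under storage the datastore itself
  // holds credentials for (prefix match on the credential name).
  private def pathMatchesGlobalCredentials(pathLiteral: String): Boolean =
    globalCredentialNames.exists(name => pathLiteral.startsWith(name))

  private def pathIsInLocalDirectoryWhitelist(pathLiteral: String): Boolean =
    localDirectoryWhitelist.exists(entry => pathLiteral.startsWith(entry))

  def pathIsAllowedToAddDirectly(pathLiteral: String): Boolean =
    if (pathIsLocal(pathLiteral))
      pathIsDataSourceLocal(pathLiteral) || pathIsInLocalDirectoryWhitelist(pathLiteral)
    else
      !pathMatchesGlobalCredentials(pathLiteral)

  def main(args: Array[String]): Unit = {
    println(pathIsAllowedToAddDirectly("color/1"))                      // true: relative, stays inside the base directory
    println(pathIsAllowedToAddDirectly("../other-dataset/color/1"))     // false: escapes the base directory
    println(pathIsAllowedToAddDirectly("s3://webknossos-bucket/ds/1"))  // false: prefix-matches a global credential
    println(pathIsAllowedToAddDirectly("s3://example.org/public/ds/1")) // true: remote, no matching credential
  }
}

The design intent, as reflected in the messages entry, is that directly added or uploaded datasources may not reference local data outside their own directory and may not piggyback on the object storage the datastore accesses with its own credentials.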
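
Patch 7 applies the same allExplicitPaths helper to updates: updateDataSource gains a preventNewPaths flag, and assertNoNewPaths requires the incoming datasource's explicit paths to be a subset of those already stored in datasource-properties.json. Below is a minimal sketch of that subset check, assuming simplified stand-in types (MagSketch, LayerSketch, DataSourceSketch) for the real MagLocator, DataLayer, attachment and DataSource models.

final case class MagSketch(path: Option[String])

final case class LayerSketch(mags: Seq[MagSketch], attachmentPaths: Seq[String]) {
  // Mirrors DataLayerLike.allExplicitPaths: explicit mag paths plus attachment paths.
  def allExplicitPaths: Seq[String] = mags.flatMap(_.path) ++ attachmentPaths
}

final case class DataSourceSketch(dataLayers: Seq[LayerSketch]) {
  def allExplicitPaths: Seq[String] = dataLayers.flatMap(_.allExplicitPaths)
}

object UpdateCheckSketch {

  // An update may keep or drop paths, but must not introduce paths that were
  // not already present in the stored datasource.
  def hasNoNewPaths(stored: DataSourceSketch, incoming: DataSourceSketch): Boolean = {
    val storedPaths = stored.allExplicitPaths.toSet
    incoming.allExplicitPaths.forall(storedPaths.contains)
  }

  def main(args: Array[String]): Unit = {
    val stored = DataSourceSketch(
      Seq(LayerSketch(Seq(MagSketch(Some("color/1")), MagSketch(Some("color/2-2-1"))), Seq("agglomerates/agg.hdf5"))))
    val subset = DataSourceSketch(Seq(LayerSketch(Seq(MagSketch(Some("color/1"))), Seq.empty)))
    val extra = DataSourceSketch(Seq(LayerSketch(Seq(MagSketch(Some("../elsewhere/1"))), Seq.empty)))
    println(hasNoNewPaths(stored, subset)) // true: only already-known paths
    println(hasNoNewPaths(stored, extra))  // false: introduces a new explicit path
  }
}

In the patch itself the comparison only runs when a properties file already exists and both the stored and incoming datasources are usable; otherwise the check passes, and the dataSource.update.newExplicitPaths message points users to the compose functionality for adding new mags or layers.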