@@ -25,6 +25,12 @@ import com.scalableminds.webknossos.datastore.services.uploading._
 import com.scalableminds.webknossos.datastore.storage.DataVaultService
 import com.scalableminds.util.tools.Box.tryo
 import com.scalableminds.util.tools.{Box, Empty, Failure, Full}
+import com.scalableminds.webknossos.datastore.services.connectome.{
+  ByAgglomerateIdsRequest,
+  BySynapseIdsRequest,
+  ConnectomeFileService,
+  SynapticPartnerDirection
+}
 import com.scalableminds.webknossos.datastore.services.mapping.AgglomerateService
 import play.api.data.Form
 import play.api.data.Forms.{longNumber, nonEmptyText, number, tuple}
@@ -452,14 +458,16 @@ class DataSourceController @Inject()(
           meshFileService.clearCache(dataSourceId, layerName)
         val closedSegmentIndexFileHandleCount =
           segmentIndexFileService.clearCache(dataSourceId, layerName)
+        val closedConnectomeFileHandleCount =
+          connectomeFileService.clearCache(dataSourceId, layerName)
         val reloadedDataSource: InboxDataSource = dataSourceService.dataSourceFromDir(
           dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName),
           organizationId)
         datasetErrorLoggingService.clearForDataset(organizationId, datasetDirectoryName)
         val clearedVaultCacheEntriesOpt = dataSourceService.invalidateVaultCache(reloadedDataSource, layerName)
         clearedVaultCacheEntriesOpt.foreach { clearedVaultCacheEntries =>
           logger.info(
-            s"Cleared caches for ${layerName.map(l => s"layer '$l' of ").getOrElse("")}dataset $organizationId/$datasetDirectoryName: closed $closedAgglomerateFileHandleCount agglomerate file handles, $closedMeshFileHandleCount mesh file handles, $closedSegmentIndexFileHandleCount segment index file handles, removed $clearedBucketProviderCount bucketProviders, $clearedVaultCacheEntries vault cache entries and $removedChunksCount image chunk cache entries.")
+            s"Cleared caches for ${layerName.map(l => s"layer '$l' of ").getOrElse("")}dataset $organizationId/$datasetDirectoryName: closed $closedAgglomerateFileHandleCount agglomerate file handles, $closedMeshFileHandleCount mesh file handles, $closedSegmentIndexFileHandleCount segment index file handles, $closedConnectomeFileHandleCount connectome file handles, removed $clearedBucketProviderCount bucketProviders, $clearedVaultCacheEntries vault cache entries and $removedChunksCount image chunk cache entries.")
         }
         reloadedDataSource
       }
@@ -510,21 +518,12 @@ class DataSourceController @Inject()(
     Action.async { implicit request =>
       accessTokenService.validateAccessFromTokenContext(
        UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
-        val connectomeFileNames =
-          connectomeFileService.exploreConnectomeFiles(organizationId, datasetDirectoryName, dataLayerName)
         for {
-          mappingNames <- Fox.serialCombined(connectomeFileNames.toList) { connectomeFileName =>
-            val path =
-              connectomeFileService.connectomeFilePath(organizationId,
-                                                       datasetDirectoryName,
-                                                       dataLayerName,
-                                                       connectomeFileName)
-            connectomeFileService.mappingNameForConnectomeFile(path)
-          }
-          connectomesWithMappings = connectomeFileNames
-            .zip(mappingNames)
-            .map(tuple => ConnectomeFileNameWithMappingName(tuple._1, tuple._2))
-        } yield Ok(Json.toJson(connectomesWithMappings))
+          (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
+                                                                                    datasetDirectoryName,
+                                                                                    dataLayerName)
+          connectomeFileInfos <- connectomeFileService.listConnectomeFiles(dataSource.id, dataLayer)
+        } yield Ok(Json.toJson(connectomeFileInfos))
       }
     }
 
@@ -535,10 +534,13 @@ class DataSourceController @Inject()(
       accessTokenService.validateAccessFromTokenContext(
        UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
         for {
-          meshFilePath <- Fox.successful(
-            connectomeFileService
-              .connectomeFilePath(organizationId, datasetDirectoryName, dataLayerName, request.body.connectomeFile))
-          synapses <- connectomeFileService.synapsesForAgglomerates(meshFilePath, request.body.agglomerateIds)
+          (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
+                                                                                    datasetDirectoryName,
+                                                                                    dataLayerName)
+          meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id,
+                                                                       dataLayer,
+                                                                       request.body.connectomeFile)
+          synapses <- connectomeFileService.synapsesForAgglomerates(meshFileKey, request.body.agglomerateIds)
         } yield Ok(Json.toJson(synapses))
       }
     }
@@ -551,12 +553,18 @@ class DataSourceController @Inject()(
       accessTokenService.validateAccessFromTokenContext(
        UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
         for {
-          meshFilePath <- Fox.successful(
-            connectomeFileService
-              .connectomeFilePath(organizationId, datasetDirectoryName, dataLayerName, request.body.connectomeFile))
-          agglomerateIds <- connectomeFileService.synapticPartnerForSynapses(meshFilePath,
+          directionValidated <- SynapticPartnerDirection
+            .fromString(direction)
+            .toFox ?~> "could not parse synaptic partner direction"
+          (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
+                                                                                    datasetDirectoryName,
+                                                                                    dataLayerName)
+          meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id,
+                                                                       dataLayer,
+                                                                       request.body.connectomeFile)
+          agglomerateIds <- connectomeFileService.synapticPartnerForSynapses(meshFileKey,
                                                                              request.body.synapseIds,
-                                                                             direction)
+                                                                             directionValidated)
         } yield Ok(Json.toJson(agglomerateIds))
       }
     }
@@ -568,10 +576,13 @@ class DataSourceController @Inject()(
       accessTokenService.validateAccessFromTokenContext(
        UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
         for {
-          meshFilePath <- Fox.successful(
-            connectomeFileService
-              .connectomeFilePath(organizationId, datasetDirectoryName, dataLayerName, request.body.connectomeFile))
-          synapsePositions <- connectomeFileService.positionsForSynapses(meshFilePath, request.body.synapseIds)
+          (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
+                                                                                    datasetDirectoryName,
+                                                                                    dataLayerName)
+          meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id,
+                                                                       dataLayer,
+                                                                       request.body.connectomeFile)
+          synapsePositions <- connectomeFileService.positionsForSynapses(meshFileKey, request.body.synapseIds)
         } yield Ok(Json.toJson(synapsePositions))
       }
     }
@@ -583,10 +594,13 @@ class DataSourceController @Inject()(
       accessTokenService.validateAccessFromTokenContext(
        UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
         for {
-          meshFilePath <- Fox.successful(
-            connectomeFileService
-              .connectomeFilePath(organizationId, datasetDirectoryName, dataLayerName, request.body.connectomeFile))
-          synapseTypes <- connectomeFileService.typesForSynapses(meshFilePath, request.body.synapseIds)
+          (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
+                                                                                    datasetDirectoryName,
+                                                                                    dataLayerName)
+          meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id,
+                                                                       dataLayer,
+                                                                       request.body.connectomeFile)
+          synapseTypes <- connectomeFileService.typesForSynapses(meshFileKey, request.body.synapseIds)
         } yield Ok(Json.toJson(synapseTypes))
       }
     }