@@ -1033,6 +1033,16 @@ def make(self, key):
         si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer"
         if si_sorting_analyzer_dir.exists():  # Read from spikeinterface outputs
             import spikeinterface as si
+            from spikeinterface import sorters
+
+            sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl"
+            si_sorting_: si.sorters.BaseSorter = si.load_extractor(
+                sorting_file, base_folder=output_dir
+            )
+            if si_sorting_.unit_ids.size == 0:
+                logger.info(f"No units found in {sorting_file}. Skipping Unit ingestion...")
+                self.insert1(key)
+                return
 
             sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir)
             si_sorting = sorting_analyzer.sorting
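In isolation, the guard added in this hunk amounts to the following sketch (the paths and the "kilosort4" sorter name are hypothetical; it assumes SpikeInterface's load_extractor() and the unit_ids attribute of the loaded sorting):

# Standalone sketch of the empty-sorting guard; paths below are hypothetical.
from pathlib import Path

import spikeinterface as si

output_dir = Path("/data/processed/subject1_session1")  # hypothetical processed-session root
sorting_file = output_dir / "kilosort4" / "spike_sorting" / "si_sorting.pkl"

# load_extractor() restores a dumped/pickled sorting; base_folder resolves relative paths
sorting = si.load_extractor(sorting_file, base_folder=output_dir)

if sorting.unit_ids.size == 0:
    # nothing was sorted, so downstream unit/waveform/metric ingestion can be skipped
    print(f"No units found in {sorting_file}; skipping unit ingestion.")
else:
    print(f"{sorting.unit_ids.size} units ready for ingestion.")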
@@ -1241,6 +1251,11 @@ def make(self, key):
         output_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
         sorter_name = clustering_method.replace(".", "_")
 
+        self.insert1(key)
+        if not len(CuratedClustering.Unit & key):
+            logger.info(f"No CuratedClustering.Unit found for {key}, skipping Waveform ingestion.")
+            return
+
         # Get channel and electrode-site mapping
         electrode_query = (EphysRecording.Channel & key).proj(..., "-channel_name")
         channel2electrode_map: dict[int, dict] = {
@@ -1294,7 +1309,6 @@ def yield_unit_waveforms():
                     ]
 
                     yield unit_peak_waveform, unit_electrode_waveforms
-
         else:  # read from kilosort outputs (ecephys pipeline)
             kilosort_dataset = kilosort.Kilosort(output_dir)
@@ -1394,7 +1408,6 @@ def yield_unit_waveforms():
                     yield unit_peak_waveform, unit_electrode_waveforms
 
         # insert waveform on a per-unit basis to mitigate potential memory issue
-        self.insert1(key)
         for unit_peak_waveform, unit_electrode_waveforms in yield_unit_waveforms():
             if unit_peak_waveform:
                 self.PeakWaveform.insert1(unit_peak_waveform, ignore_extra_fields=True)
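Together with the earlier hunk, self.insert1(key) now runs before any per-unit work, so a key with no units still gets its master row and is not re-attempted on the next populate() call. A minimal sketch of this master-first pattern with a hypothetical master/part pair (schema, table, and helper names are invented for illustration):

# Sketch of the master-first insert pattern: the master row goes in before any
# part rows, so a key with nothing to ingest is still marked as populated.
# Schema, table, and helper names are hypothetical.
import datajoint as dj

schema = dj.schema("demo_master_first")


@schema
class Recording(dj.Manual):
    definition = """
    recording_id: int
    """


def compute_items(key):
    # stand-in for real analysis; an empty list exercises the early return in make()
    return []


@schema
class Summary(dj.Computed):
    definition = """
    -> Recording
    """

    class Item(dj.Part):
        definition = """
        -> master
        item_id: int
        """

    def make(self, key):
        self.insert1(key)  # master row first
        items = compute_items(key)
        if not items:
            return  # no part rows; the key still counts as populated
        self.Item.insert(dict(key, **item) for item in items)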
@@ -1501,6 +1514,11 @@ def make(self, key):
         output_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
         sorter_name = clustering_method.replace(".", "_")
 
+        self.insert1(key)
+        if not len(CuratedClustering.Unit & key):
+            logger.info(f"No CuratedClustering.Unit found for {key}, skipping QualityMetrics ingestion.")
+            return
+
         si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer"
         if si_sorting_analyzer_dir.exists():  # read from spikeinterface outputs
             import spikeinterface as si
@@ -1556,7 +1574,6 @@ def make(self, key):
             for unit_key in (CuratedClustering.Unit & key).fetch("KEY")
         ]
 
-        self.insert1(key)
         self.Cluster.insert(metrics_list, ignore_extra_fields=True)
         self.Waveform.insert(metrics_list, ignore_extra_fields=True)