@@ -4,9 +4,10 @@
import numpy as np
import inspect
import importlib
+ import element_data_loader

from .readers import spikeglx, kilosort, openephys
- from . import probe, find_full_path, find_root_directory, dict_to_uuid
+ from . import probe

schema = dj.schema()

@@ -46,7 +47,6 @@ def activate(ephys_schema_name, probe_schema_name=None, *, create_schema=True,
    global _linking_module
    _linking_module = linking_module

-     # activate
    probe.activate(probe_schema_name, create_schema=create_schema,
                   create_tables=create_tables)
    schema.activate(ephys_schema_name, create_schema=create_schema,
@@ -140,14 +140,16 @@ class EphysFile(dj.Part):
        """

    def make(self, key):
-         sess_dir = pathlib.Path(get_session_directory(key))
+
+         session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(),
+                                                                get_session_directory(key))

        inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1('probe')

        # search session dir and determine acquisition software
        for ephys_pattern, ephys_acq_type in zip(['*.ap.meta', '*.oebin'],
                                                 ['SpikeGLX', 'Open Ephys']):
-             ephys_meta_filepaths = [fp for fp in sess_dir.rglob(ephys_pattern)]
+             ephys_meta_filepaths = [fp for fp in session_dir.rglob(ephys_pattern)]
            if ephys_meta_filepaths:
                acq_software = ephys_acq_type
                break
@@ -187,12 +189,12 @@ def make(self, key):
                          'acq_software': acq_software,
                          'sampling_rate': spikeglx_meta.meta['imSampRate']})

-             root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath)
+             root_dir = element_data_loader.utils.find_root_directory(get_ephys_root_data_dir(), meta_filepath)
            self.EphysFile.insert1({
                **key,
                'file_path': meta_filepath.relative_to(root_dir).as_posix()})
        elif acq_software == 'Open Ephys':
-             dataset = openephys.OpenEphys(sess_dir)
+             dataset = openephys.OpenEphys(session_dir)
            for serial_number, probe_data in dataset.probes.items():
                if str(serial_number) == inserted_probe_serial_number:
                    break
@@ -220,7 +222,7 @@ def make(self, key):
                          'acq_software': acq_software,
                          'sampling_rate': probe_data.ap_meta['sample_rate']})

-             root_dir = find_root_directory(
+             root_dir = element_data_loader.utils.find_root_directory(
                get_ephys_root_data_dir(),
                probe_data.recording_info['recording_files'][0])
            self.EphysFile.insert([{**key,
@@ -290,8 +292,12 @@ def make(self, key):
                shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap['data'][recorded_site]
                electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)])
        elif acq_software == 'Open Ephys':
-             sess_dir = pathlib.Path(get_session_directory(key))
-             loaded_oe = openephys.OpenEphys(sess_dir)
+
+             session_dir = element_data_loader.utils.find_full_path(
+                 get_ephys_root_data_dir(),
+                 get_session_directory(key))
+
+             loaded_oe = openephys.OpenEphys(session_dir)
            oe_probe = loaded_oe.probes[probe_sn]

            lfp_channel_ind = np.arange(
@@ -358,7 +364,7 @@ def insert_new_params(cls, processing_method: str, paramset_idx: int,
                      'paramset_idx': paramset_idx,
                      'paramset_desc': paramset_desc,
                      'params': params,
-                       'param_set_hash': dict_to_uuid(params)}
+                       'param_set_hash': element_data_loader.utils.dict_to_uuid(params)}
        param_query = cls & {'param_set_hash': param_dict['param_set_hash']}

        if param_query:  # If the specified param-set already exists
@@ -420,7 +426,7 @@ class Clustering(dj.Imported):
    def make(self, key):
        task_mode, output_dir = (ClusteringTask & key).fetch1(
            'task_mode', 'clustering_output_dir')
-         kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
+         kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir)

        if task_mode == 'load':
            kilosort_dataset = kilosort.Kilosort(kilosort_dir)  # check if the directory is a valid Kilosort output
@@ -450,23 +456,26 @@ class Curation(dj.Manual):

    def create1_from_clustering_task(self, key, curation_note=''):
        """
-         A convenient function to create a new corresponding "Curation"
-         for a particular "ClusteringTask"
+         A function to create a new corresponding "Curation" for a particular
+         "ClusteringTask", which assumes that no curation was performed on the
+         dataset
        """
        if key not in Clustering():
            raise ValueError(f'No corresponding entry in Clustering available'
                             f' for: {key}; do `Clustering.populate(key)`')

        task_mode, output_dir = (ClusteringTask & key).fetch1(
            'task_mode', 'clustering_output_dir')
-         kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
+         kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir)

        creation_time, is_curated, is_qc = kilosort.extract_clustering_info(kilosort_dir)
        # Synthesize curation_id
        curation_id = dj.U().aggr(self & key, n='ifnull(max(curation_id)+1,1)').fetch1('n')
        self.insert1({**key, 'curation_id': curation_id,
-                       'curation_time': creation_time, 'curation_output_dir': output_dir,
-                       'quality_control': is_qc, 'manual_curation': is_curated,
+                       'curation_time': creation_time,
+                       'curation_output_dir': output_dir,
+                       'quality_control': is_qc,
+                       'manual_curation': is_curated,
                      'curation_note': curation_note})

@@ -493,7 +502,7 @@ class Unit(dj.Part):

    def make(self, key):
        output_dir = (Curation & key).fetch1('curation_output_dir')
-         kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
+         kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir)

        kilosort_dataset = kilosort.Kilosort(kilosort_dir)
        acq_software = (EphysRecording & key).fetch1('acq_software')
@@ -571,7 +580,7 @@ class Waveform(dj.Part):

    def make(self, key):
        output_dir = (Curation & key).fetch1('curation_output_dir')
-         kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir)
+         kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir)

        kilosort_dataset = kilosort.Kilosort(kilosort_dir)

@@ -613,8 +622,9 @@ def yield_unit_waveforms():
            spikeglx_meta_filepath = get_spikeglx_meta_filepath(key)
            neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent)
        elif acq_software == 'Open Ephys':
-             sess_dir = pathlib.Path(get_session_directory(key))
-             openephys_dataset = openephys.OpenEphys(sess_dir)
+             session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(),
+                                                                    get_session_directory(key))
+             openephys_dataset = openephys.OpenEphys(session_dir)
            neuropixels_recording = openephys_dataset.probes[probe_serial_number]

        def yield_unit_waveforms():
@@ -654,16 +664,17 @@ def get_spikeglx_meta_filepath(ephys_recording_key):
                              & 'file_path LIKE "%.ap.meta"').fetch1('file_path')

    try:
-         spikeglx_meta_filepath = find_full_path(get_ephys_root_data_dir(),
+         spikeglx_meta_filepath = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(),
                                                 spikeglx_meta_filepath)
    except FileNotFoundError:
        # if not found, search in session_dir again
        if not spikeglx_meta_filepath.exists():
-             sess_dir = pathlib.Path(get_session_directory(ephys_recording_key))
+             session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(),
+                                                                    get_session_directory(ephys_recording_key))
            inserted_probe_serial_number = (ProbeInsertion * probe.Probe
                                            & ephys_recording_key).fetch1('probe')

-             spikeglx_meta_filepaths = [fp for fp in sess_dir.rglob('*.ap.meta')]
+             spikeglx_meta_filepaths = [fp for fp in session_dir.rglob('*.ap.meta')]
            for meta_filepath in spikeglx_meta_filepaths:
                spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath)
                if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number:
@@ -696,8 +707,9 @@ def get_neuropixels_channel2electrode_map(ephys_recording_key, acq_software):
            for recorded_site, (shank, shank_col, shank_row, _) in enumerate(
                spikeglx_meta.shankmap['data'])}
    elif acq_software == 'Open Ephys':
-         sess_dir = pathlib.Path(get_session_directory(ephys_recording_key))
-         openephys_dataset = openephys.OpenEphys(sess_dir)
+         session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(),
+                                                                get_session_directory(ephys_recording_key))
+         openephys_dataset = openephys.OpenEphys(session_dir)
        probe_serial_number = (ProbeInsertion & ephys_recording_key).fetch1('probe')
        probe_dataset = openephys_dataset.probes[probe_serial_number]

@@ -723,7 +735,7 @@ def generate_electrode_config(probe_type: str, electrodes: list):
    :return: a dict representing a key of the probe.ElectrodeConfig table
    """
    # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode)
-     electrode_config_hash = dict_to_uuid({k['electrode']: k for k in electrodes})
+     electrode_config_hash = element_data_loader.utils.dict_to_uuid({k['electrode']: k for k in electrodes})

    electrode_list = sorted([k['electrode'] for k in electrodes])
    electrode_gaps = ([-1]
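Taken together, this change swaps the locally defined path and hash helpers for their `element_data_loader.utils` counterparts and resolves the session directory against the ephys root directory instead of treating the value returned by `get_session_directory()` as an absolute path. Below is a minimal sketch of the resulting call pattern, not part of the diff itself; it assumes `element_data_loader.utils` exposes `find_full_path`, `find_root_directory`, and `dict_to_uuid` with the same behavior as the helpers they replace, and the helper names `resolve_session_dir`, `store_relative_path`, and `paramset_hash` are hypothetical, used only for illustration.

```python
import element_data_loader

# Hypothetical helpers (illustration only) showing the call pattern this diff
# standardizes on. `root_dirs` stands in for get_ephys_root_data_dir() and
# `session_rel_path` for get_session_directory(key), both supplied by the
# linking module passed to activate().

def resolve_session_dir(root_dirs, session_rel_path):
    # Resolve a (possibly relative) session path against the configured ephys
    # root directory (or list of candidate roots), returning a pathlib.Path.
    return element_data_loader.utils.find_full_path(root_dirs, session_rel_path)

def store_relative_path(root_dirs, full_path):
    # Record a file path relative to whichever root directory contains it,
    # as done for EphysRecording.EphysFile entries.
    root_dir = element_data_loader.utils.find_root_directory(root_dirs, full_path)
    return full_path.relative_to(root_dir).as_posix()

def paramset_hash(params):
    # UUID derived from a parameter dict, as used for duplicate detection in
    # ClusteringParamSet.insert_new_params.
    return element_data_loader.utils.dict_to_uuid(params)
```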