@@ -185,7 +185,8 @@ def auto_generate_entries(cls, session_key):
 
                 probe_dir = meta_filepath.parent
                 try:
-                    probe_number = re.search("(imec)?\d{1}$", probe_dir.name).group()
+                    probe_number = re.search(
+                        "(imec)?\d{1}$", probe_dir.name).group()
                     probe_number = int(probe_number.replace("imec", ""))
                 except AttributeError:
                     probe_number = meta_fp_idx
@@ -214,7 +215,8 @@ def auto_generate_entries(cls, session_key):
                     }
                 )
         else:
-            raise NotImplementedError(f"Unknown acquisition software: {acq_software}")
+            raise NotImplementedError(
+                f"Unknown acquisition software: {acq_software}")
 
         probe.Probe.insert(probe_list, skip_duplicates=True)
         cls.insert(probe_insertion_list, skip_duplicates=True)
@@ -231,8 +233,7 @@ class InsertionLocation(dj.Manual):
         ml_location (decimal (6, 2) ): Medial-lateral location in micrometers. Reference is zero with right side values positive.
         depth (decimal (6, 2) ): Manipulator depth relative to the surface of the brain at zero. Ventral is negative.
         Theta (decimal (5, 2) ): elevation - rotation about the ml-axis in degrees relative to positive z-axis.
-        phi (decimal (5, 2) ): azimuth - rotation about the dv-axis in degrees relative to the positive x-axis
-
+        phi (decimal (5, 2) ): azimuth - rotation about the dv-axis in degrees relative to the positive x-axis.
     """
 
     definition = """
@@ -322,12 +323,14 @@ def make(self, key):
                     break
             else:
                 raise FileNotFoundError(
-                    "No SpikeGLX data found for probe insertion: {}".format(key)
+                    "No SpikeGLX data found for probe insertion: {}".format(
+                        key)
                 )
 
             if spikeglx_meta.probe_model in supported_probe_types:
                 probe_type = spikeglx_meta.probe_model
-                electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type}
+                electrode_query = probe.ProbeType.Electrode & {
+                    "probe_type": probe_type}
 
                 probe_electrodes = {
                     (shank, shank_col, shank_row): key
@@ -360,9 +363,11 @@ def make(self, key):
                 }
             )
 
-            root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath)
+            root_dir = find_root_directory(
+                get_ephys_root_data_dir(), meta_filepath)
             self.EphysFile.insert1(
-                {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()}
+                {**key,
+                 "file_path": meta_filepath.relative_to(root_dir).as_posix()}
             )
         elif acq_software == "Open Ephys":
             dataset = openephys.OpenEphys(session_dir)
@@ -371,7 +376,8 @@ def make(self, key):
                     break
             else:
                 raise FileNotFoundError(
-                    "No Open Ephys data found for probe insertion: {}".format(key)
+                    "No Open Ephys data found for probe insertion: {}".format(
+                        key)
                 )
 
             if not probe_data.ap_meta:
@@ -381,7 +387,8 @@ def make(self, key):
 
             if probe_data.probe_model in supported_probe_types:
                 probe_type = probe_data.probe_model
-                electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type}
+                electrode_query = probe.ProbeType.Electrode & {
+                    "probe_type": probe_type}
 
                 probe_electrodes = {
                     key["electrode"]: key for key in electrode_query.fetch("KEY")
@@ -394,7 +401,8 @@ def make(self, key):
             else:
                 raise NotImplementedError(
                     "Processing for neuropixels"
-                    " probe model {} not yet implemented".format(probe_data.probe_model)
+                    " probe model {} not yet implemented".format(
+                        probe_data.probe_model)
                 )
 
             self.insert1(
@@ -456,7 +464,7 @@ class LFP(dj.Imported):
 
     class Electrode(dj.Part):
         """Saves local field potential data for each electrode.
-
+
         Attributes:
             LFP (foreign key): LFP primary key.
             probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key.
@@ -476,24 +484,27 @@ class Electrode(dj.Part):
 
     def make(self, key):
         """Populates the LFP tables."""
-        acq_software = (EphysRecording * ProbeInsertion & key).fetch1("acq_software")
+        acq_software = (EphysRecording * ProbeInsertion &
+                        key).fetch1("acq_software")
 
         electrode_keys, lfp = [], []
 
         if acq_software == "SpikeGLX":
             spikeglx_meta_filepath = get_spikeglx_meta_filepath(key)
-            spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent)
+            spikeglx_recording = spikeglx.SpikeGLX(
+                spikeglx_meta_filepath.parent)
 
             lfp_channel_ind = spikeglx_recording.lfmeta.recording_channels[
-                -1 :: -self._skip_channel_counts
+                -1:: -self._skip_channel_counts
             ]
 
             # Extract LFP data at specified channels and convert to uV
             lfp = spikeglx_recording.lf_timeseries[
                 :, lfp_channel_ind
             ]  # (sample x channel)
             lfp = (
-                lfp * spikeglx_recording.get_channel_bit_volts("lf")[lfp_channel_ind]
+                lfp *
+                spikeglx_recording.get_channel_bit_volts("lf")[lfp_channel_ind]
             ).T  # (channel x sample)
 
             self.insert1(
@@ -525,18 +536,21 @@ def make(self, key):
                 shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap[
                     "data"
                 ][recorded_site]
-                electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)])
+                electrode_keys.append(
+                    probe_electrodes[(shank, shank_col, shank_row)])
         elif acq_software == "Open Ephys":
             oe_probe = get_openephys_probe_data(key)
 
             lfp_channel_ind = np.r_[
                 len(oe_probe.lfp_meta["channels_indices"])
-                - 1 : 0 : -self._skip_channel_counts
+                - 1: 0: -self._skip_channel_counts
             ]
 
-            lfp = oe_probe.lfp_timeseries[:, lfp_channel_ind]  # (sample x channel)
+            # (sample x channel)
+            lfp = oe_probe.lfp_timeseries[:, lfp_channel_ind]
             lfp = (
-                lfp * np.array(oe_probe.lfp_meta["channels_gains"])[lfp_channel_ind]
+                lfp *
+                np.array(oe_probe.lfp_meta["channels_gains"])[lfp_channel_ind]
             ).T  # (channel x sample)
             lfp_timestamps = oe_probe.lfp_timestamps
 
@@ -608,7 +622,7 @@ class ClusteringParamSet(dj.Lookup):
         ClusteringMethod (dict): ClusteringMethod primary key.
         paramset_desc (varchar(128) ): Description of the clustering parameter set.
         param_set_hash (uuid): UUID hash for the parameter set.
-        params (longblob)
+        params (longblob): Parameters for clustering with Kilosort.
     """
 
     definition = """
@@ -764,7 +778,8 @@ def auto_generate_entries(cls, ephys_recording_key: dict, paramset_idx: int = 0)
         key = {**ephys_recording_key, "paramset_idx": paramset_idx}
 
         processed_dir = get_processed_root_data_dir()
-        output_dir = ClusteringTask.infer_output_dir(key, relative=False, mkdir=True)
+        output_dir = ClusteringTask.infer_output_dir(
+            key, relative=False, mkdir=True)
 
         try:
             kilosort.Kilosort(
@@ -811,7 +826,8 @@ def make(self, key):
         )
 
         if not output_dir:
-            output_dir = ClusteringTask.infer_output_dir(key, relative=True, mkdir=True)
+            output_dir = ClusteringTask.infer_output_dir(
+                key, relative=True, mkdir=True)
             # update clustering_output_dir
             ClusteringTask.update1(
                 {**key, "clustering_output_dir": output_dir.as_posix()}
@@ -1022,7 +1038,8 @@ def make(self, key):
             "acq_software", "sampling_rate"
         )
 
-        sample_rate = kilosort_dataset.data["params"].get("sample_rate", sample_rate)
+        sample_rate = kilosort_dataset.data["params"].get(
+            "sample_rate", sample_rate)
 
         # ---------- Unit ----------
         # -- Remove 0-spike units
@@ -1034,7 +1051,8 @@ def make(self, key):
         valid_units = kilosort_dataset.data["cluster_ids"][withspike_idx]
         valid_unit_labels = kilosort_dataset.data["cluster_groups"][withspike_idx]
         # -- Get channel and electrode-site mapping
-        channel2electrodes = get_neuropixels_channel2electrode_map(key, acq_software)
+        channel2electrodes = get_neuropixels_channel2electrode_map(
+            key, acq_software)
 
         # -- Spike-times --
         # spike_times_sec_adj > spike_times_sec > spike_times
@@ -1201,7 +1219,8 @@ def yield_unit_waveforms():
         else:
             if acq_software == "SpikeGLX":
                 spikeglx_meta_filepath = get_spikeglx_meta_filepath(key)
-                neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent)
+                neuropixels_recording = spikeglx.SpikeGLX(
+                    spikeglx_meta_filepath.parent)
             elif acq_software == "Open Ephys":
                 session_dir = find_full_path(
                     get_ephys_root_data_dir(), get_session_directory(key)
@@ -1249,9 +1268,11 @@ def yield_unit_waveforms():
         self.insert1(key)
         for unit_peak_waveform, unit_electrode_waveforms in yield_unit_waveforms():
             if unit_peak_waveform:
-                self.PeakWaveform.insert1(unit_peak_waveform, ignore_extra_fields=True)
+                self.PeakWaveform.insert1(
+                    unit_peak_waveform, ignore_extra_fields=True)
             if unit_electrode_waveforms:
-                self.Waveform.insert(unit_electrode_waveforms, ignore_extra_fields=True)
+                self.Waveform.insert(
+                    unit_electrode_waveforms, ignore_extra_fields=True)
 
 
 @schema
@@ -1396,7 +1417,8 @@ def get_spikeglx_meta_filepath(ephys_recording_key: dict) -> str:
             ProbeInsertion * probe.Probe & ephys_recording_key
         ).fetch1("probe")
 
-        spikeglx_meta_filepaths = [fp for fp in session_dir.rglob("*.ap.meta")]
+        spikeglx_meta_filepaths = [
+            fp for fp in session_dir.rglob("*.ap.meta")]
         for meta_filepath in spikeglx_meta_filepaths:
             spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath)
             if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number:
@@ -1436,7 +1458,8 @@ def get_neuropixels_channel2electrode_map(
 ) -> dict:
     """Get the channel map for neuropixels probe."""
     if acq_software == "SpikeGLX":
-        spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key)
+        spikeglx_meta_filepath = get_spikeglx_meta_filepath(
+            ephys_recording_key)
         spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath)
         electrode_config_key = (
             EphysRecording * probe.ElectrodeConfig & ephys_recording_key
@@ -1491,7 +1514,8 @@ def generate_electrode_config(probe_type: str, electrode_keys: list) -> dict:
         dict: representing a key of the probe.ElectrodeConfig table
     """
     # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode)
-    electrode_config_hash = dict_to_uuid({k["electrode"]: k for k in electrode_keys})
+    electrode_config_hash = dict_to_uuid(
+        {k["electrode"]: k for k in electrode_keys})
 
     electrode_list = sorted([k["electrode"] for k in electrode_keys])
     electrode_gaps = (
@@ -1561,9 +1585,11 @@ def get_recording_channels_details(ephys_recording_key: dict) -> np.array:
     channels_details["num_channels"] = len(channels_details["channel_ind"])
 
     if acq_software == "SpikeGLX":
-        spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key)
+        spikeglx_meta_filepath = get_spikeglx_meta_filepath(
+            ephys_recording_key)
         spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent)
-        channels_details["uVPerBit"] = spikeglx_recording.get_channel_bit_volts("ap")[0]
+        channels_details["uVPerBit"] = spikeglx_recording.get_channel_bit_volts("ap")[
+            0]
         channels_details["connected"] = np.array(
             [v for *_, v in spikeglx_recording.apmeta.shankmap["data"]]
         )
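The hunks above only re-wrap long statements to a shorter line length; the entry points they touch keep their signatures. For orientation, a minimal usage sketch of those entry points follows. It assumes `ephys` is the already-activated element-array-ephys module (activation boilerplate omitted), and the `subject`, `session_datetime`, and `insertion_number` values are illustrative placeholders, not anything taken from this commit.

```python
# Minimal sketch of driving the methods touched by this diff.
# Assumptions: `ephys` is the activated element-array-ephys module;
# all key values below are illustrative placeholders.
session_key = {"subject": "subject1", "session_datetime": "2023-01-01 12:00:00"}
ephys_recording_key = {**session_key, "insertion_number": 0}

# Register probes and probe insertions found in the session directory.
ephys.ProbeInsertion.auto_generate_entries(session_key)

# Ingest recording metadata, then local field potentials.
ephys.EphysRecording.populate(session_key, display_progress=True)
ephys.LFP.populate(session_key, display_progress=True)

# Queue a clustering task for this recording, then ingest its outputs.
ephys.ClusteringTask.auto_generate_entries(ephys_recording_key, paramset_idx=0)
ephys.Clustering.populate(session_key, display_progress=True)
ephys.CuratedClustering.populate(session_key, display_progress=True)
ephys.WaveformSet.populate(session_key, display_progress=True)
```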