@@ -8,7 +8,9 @@
 import datajoint as dj
 import numpy as np
 import pandas as pd
+import spikeinterface as si
 from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory
+from spikeinterface import exporters, postprocessing, qualitymetrics, sorters

 from . import ephys_report, probe
 from .readers import kilosort, openephys, spikeglx

@@ -19,9 +21,6 @@

 _linking_module = None

-import spikeinterface as si
-from spikeinterface import exporters, postprocessing, qualitymetrics, sorters
-

 def activate(
     ephys_schema_name: str,
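The two hunks above move the SpikeInterface imports from below `_linking_module = None` into the standard top-of-module import block, grouping all third-party imports together. A quick, illustrative way to confirm the dependency resolves in a given environment (the version print and sorter listing are not part of this commit):

```python
# Illustrative check only; not part of the committed module.
import spikeinterface as si
from spikeinterface import exporters, postprocessing, qualitymetrics, sorters

print(si.__version__)               # fails at import time if SpikeInterface is missing
print(sorters.available_sorters())  # sorter backends SpikeInterface can wrap
```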
@@ -327,129 +326,154 @@ def make(self, key):
                 break
         else:
             raise FileNotFoundError(
-                "Ephys recording data not found!"
+                f"Ephys recording data not found! for {key}."
                 "Neither SpikeGLX nor Open Ephys recording files found"
             )

-        supported_probe_types = probe.ProbeType.fetch("probe_type")
+        if acq_software not in AcquisitionSoftware.fetch("acq_software"):
+            raise NotImplementedError(
+                f"Processing ephys files from acquisition software of type {acq_software} is not yet implemented."
+            )

-        if acq_software == "SpikeGLX":
-            for meta_filepath in ephys_meta_filepaths:
-                spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath)
-                if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number:
-                    break
-            else:
-                raise FileNotFoundError(
-                    "No SpikeGLX data found for probe insertion: {}".format(key)
-                )
+        else:
+            supported_probe_types = probe.ProbeType.fetch("probe_type")
+
+            if acq_software == "SpikeGLX":
+                for meta_filepath in ephys_meta_filepaths:
+                    spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath)
+                    if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number:
+                        break
+                else:
+                    raise FileNotFoundError(
+                        "No SpikeGLX data found for probe insertion: {}".format(key)
+                    )
+
+                if spikeglx_meta.probe_model in supported_probe_types:
+                    probe_type = spikeglx_meta.probe_model
+                    electrode_query = probe.ProbeType.Electrode & {
+                        "probe_type": probe_type
+                    }

-            if spikeglx_meta.probe_model in supported_probe_types:
-                probe_type = spikeglx_meta.probe_model
-                electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type}
+                    probe_electrodes = {
+                        (shank, shank_col, shank_row): key
+                        for key, shank, shank_col, shank_row in zip(
+                            *electrode_query.fetch(
+                                "KEY", "shank", "shank_col", "shank_row"
+                            )
+                        )
+                    }

-                probe_electrodes = {
-                    (shank, shank_col, shank_row): key
-                    for key, shank, shank_col, shank_row in zip(
-                        *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row")
+                    electrode_group_members = [
+                        probe_electrodes[(shank, shank_col, shank_row)]
+                        for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap[
+                            "data"
+                        ]
+                    ]
+                else:
+                    raise NotImplementedError(
+                        "Processing for neuropixels probe model"
+                        " {} not yet implemented".format(spikeglx_meta.probe_model)
                     )
-                }

-                electrode_group_members = [
-                    probe_electrodes[(shank, shank_col, shank_row)]
-                    for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"]
-                ]
-            else:
-                raise NotImplementedError(
-                    "Processing for neuropixels probe model"
-                    " {} not yet implemented".format(spikeglx_meta.probe_model)
+                self.insert1(
+                    {
+                        **key,
+                        **generate_electrode_config(
+                            probe_type, electrode_group_members
+                        ),
+                        "acq_software": acq_software,
+                        "sampling_rate": spikeglx_meta.meta["imSampRate"],
+                        "recording_datetime": spikeglx_meta.recording_time,
+                        "recording_duration": (
+                            spikeglx_meta.recording_duration
+                            or spikeglx.retrieve_recording_duration(meta_filepath)
+                        ),
+                    }
                 )

-            self.insert1(
-                {
-                    **key,
-                    **generate_electrode_config(probe_type, electrode_group_members),
-                    "acq_software": acq_software,
-                    "sampling_rate": spikeglx_meta.meta["imSampRate"],
-                    "recording_datetime": spikeglx_meta.recording_time,
-                    "recording_duration": (
-                        spikeglx_meta.recording_duration
-                        or spikeglx.retrieve_recording_duration(meta_filepath)
-                    ),
-                }
-            )
-
-            root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath)
-            self.EphysFile.insert1(
-                {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()}
-            )
-        elif acq_software == "Open Ephys":
-            dataset = openephys.OpenEphys(session_dir)
-            for serial_number, probe_data in dataset.probes.items():
-                if str(serial_number) == inserted_probe_serial_number:
-                    break
-            else:
-                raise FileNotFoundError(
-                    "No Open Ephys data found for probe insertion: {}".format(key)
+                root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath)
+                self.EphysFile.insert1(
+                    {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()}
                )
+            elif acq_software == "Open Ephys":
+                dataset = openephys.OpenEphys(session_dir)
+                for serial_number, probe_data in dataset.probes.items():
+                    if str(serial_number) == inserted_probe_serial_number:
+                        break
+                else:
+                    raise FileNotFoundError(
+                        "No Open Ephys data found for probe insertion: {}".format(key)
+                    )

-            if not probe_data.ap_meta:
-                raise IOError(
-                    'No analog signals found - check "structure.oebin" file or "continuous" directory'
-                )
+                if not probe_data.ap_meta:
+                    raise IOError(
+                        'No analog signals found - check "structure.oebin" file or "continuous" directory'
+                    )

-            if probe_data.probe_model in supported_probe_types:
-                probe_type = probe_data.probe_model
-                electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type}
+                if probe_data.probe_model in supported_probe_types:
+                    probe_type = probe_data.probe_model
+                    electrode_query = probe.ProbeType.Electrode & {
+                        "probe_type": probe_type
+                    }

-                probe_electrodes = {
-                    key["electrode"]: key for key in electrode_query.fetch("KEY")
-                }
+                    probe_electrodes = {
+                        key["electrode"]: key for key in electrode_query.fetch("KEY")
+                    }

-                electrode_group_members = [
-                    probe_electrodes[channel_idx]
-                    for channel_idx in probe_data.ap_meta["channels_indices"]
-                ]
-            else:
-                raise NotImplementedError(
-                    "Processing for neuropixels"
-                    " probe model {} not yet implemented".format(probe_data.probe_model)
+                    electrode_group_members = [
+                        probe_electrodes[channel_idx]
+                        for channel_idx in probe_data.ap_meta["channels_indices"]
+                    ]
+                else:
+                    raise NotImplementedError(
+                        "Processing for neuropixels"
+                        " probe model {} not yet implemented".format(
+                            probe_data.probe_model
+                        )
+                    )
+
+                self.insert1(
+                    {
+                        **key,
+                        **generate_electrode_config(
+                            probe_type, electrode_group_members
+                        ),
+                        "acq_software": acq_software,
+                        "sampling_rate": probe_data.ap_meta["sample_rate"],
+                        "recording_datetime": probe_data.recording_info[
+                            "recording_datetimes"
+                        ][0],
+                        "recording_duration": np.sum(
+                            probe_data.recording_info["recording_durations"]
+                        ),
+                    }
                )

-            self.insert1(
-                {
-                    **key,
-                    **generate_electrode_config(probe_type, electrode_group_members),
-                    "acq_software": acq_software,
-                    "sampling_rate": probe_data.ap_meta["sample_rate"],
-                    "recording_datetime": probe_data.recording_info[
-                        "recording_datetimes"
-                    ][0],
-                    "recording_duration": np.sum(
-                        probe_data.recording_info["recording_durations"]
-                    ),
-                }
-            )
+                root_dir = find_root_directory(
+                    get_ephys_root_data_dir(),
+                    probe_data.recording_info["recording_files"][0],
+                )
+                self.EphysFile.insert(
+                    [
+                        {**key, "file_path": fp.relative_to(root_dir).as_posix()}
+                        for fp in probe_data.recording_info["recording_files"]
+                    ]
+                )
+                # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough
+                del probe_data, dataset
+                gc.collect()

-            root_dir = find_root_directory(
-                get_ephys_root_data_dir(),
-                probe_data.recording_info["recording_files"][0],
+            # Insert channel information
+            # Get channel and electrode-site mapping
+            channel2electrodes = get_neuropixels_channel2electrode_map(
+                key, acq_software
            )
-            self.EphysFile.insert(
+            self.Channel.insert(
                [
-                    {**key, "file_path": fp.relative_to(root_dir).as_posix()}
-                    for fp in probe_data.recording_info["recording_files"]
+                    {**key, "channel_idx": channel_idx, **channel_info}
+                    for channel_idx, channel_info in channel2electrodes.items()
                ]
            )
-            # explicitly garbage collect "dataset"
-            # as these may have large memory footprint and may not be cleared fast enough
-            del probe_data, dataset
-            gc.collect()
-        else:
-            raise NotImplementedError(
-                f"Processing ephys files from"
-                f" acquisition software of type {acq_software} is"
-                f" not yet implemented"
-            )


 @schema
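Most of this hunk is a re-indentation: the old trailing `else: raise NotImplementedError(...)` becomes an up-front check against `AcquisitionSoftware.fetch("acq_software")`, the SpikeGLX and Open Ephys branches move under the new `else:`, and a `self.Channel.insert(...)` call now records the channel-to-electrode-site mapping for both acquisition systems. The core data transform in both branches is the same dictionary lookup: electrode keys fetched from `probe.ProbeType.Electrode` are indexed by their physical position and then selected by the recording's shank map or channel indices. A self-contained sketch of that pattern with made-up stand-in data (no database connection; the names mirror the diff, the values are invented):

```python
# Stand-ins for electrode_query.fetch("KEY", "shank", "shank_col", "shank_row"):
# DataJoint returns one array per requested attribute, so zip() pairs them row by row.
keys = [{"probe_type": "demo_probe", "electrode": e} for e in range(4)]  # invented rows
shanks, shank_cols, shank_rows = [0, 0, 0, 0], [0, 1, 0, 1], [0, 0, 1, 1]

# (shank, shank_col, shank_row) -> electrode key, as built in the SpikeGLX branch
probe_electrodes = {
    (shank, shank_col, shank_row): key
    for key, shank, shank_col, shank_row in zip(keys, shanks, shank_cols, shank_rows)
}

# Stand-in for spikeglx_meta.shankmap["data"]: one (shank, col, row, ...) entry per
# saved channel; the trailing field is ignored by the lookup, as in the diff.
shankmap_data = [(0, 0, 0, 1), (0, 1, 0, 1), (0, 0, 1, 1)]

electrode_group_members = [
    probe_electrodes[(shank, shank_col, shank_row)]
    for shank, shank_col, shank_row, _ in shankmap_data
]
print(electrode_group_members)  # electrode keys for the channels that were actually recorded
```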
@@ -1209,11 +1233,11 @@ def make(self, key):
         we: si.WaveformExtractor = si.load_waveforms(
             output_dir / "waveform", with_recording=False
         )
-        unit_id_to_peak_channel_indices: dict[int, np.ndarray] = (
-            si.ChannelSparsity.from_best_channels(
-                we, 1, peak_sign="neg"
-            ).unit_id_to_channel_indices
-        )  # {unit: peak_channel_index}
+        unit_id_to_peak_channel_indices: dict[
+            int, np.ndarray
+        ] = si.ChannelSparsity.from_best_channels(
+            we, 1, peak_sign="neg"
+        ).unit_id_to_channel_indices  # {unit: peak_channel_index}

         units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit")

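The last hunk only re-wraps the annotated assignment; the call itself is unchanged: `si.ChannelSparsity.from_best_channels(we, 1, peak_sign="neg").unit_id_to_channel_indices` maps each unit to its single best channel, ranked by the negative (trough) peak. A small, self-contained sketch of the same call (the `clustering_output` path is a placeholder, and a previously exported waveform folder is assumed to exist there):

```python
# Illustrative sketch; assumes a "waveform" folder previously written by SpikeInterface.
from pathlib import Path

import spikeinterface as si

output_dir = Path("clustering_output")  # placeholder path, not from the diff

we = si.load_waveforms(output_dir / "waveform", with_recording=False)

# num_channels=1 keeps only the peak channel per unit; peak_sign="neg" ranks by trough depth.
unit_id_to_peak_channel_indices = si.ChannelSparsity.from_best_channels(
    we, 1, peak_sign="neg"
).unit_id_to_channel_indices

# Each value is a length-1 array of channel indices, e.g. {0: array([137]), 1: array([42]), ...}
print(unit_id_to_peak_channel_indices)
```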