Skip to content

Commit 6a6e7b7

Browse files
Merge pull request #186 from ttngu207/datajoint-spikeinterface
Pull from `main` - minor bugfix
2 parents d44dbaa + d86928b commit 6a6e7b7

File tree

16 files changed

+2472
-221
lines changed

16 files changed

+2472
-221
lines changed

.github/workflows/release.yaml

Lines changed: 0 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -4,14 +4,6 @@ on:
44
jobs:
55
make_github_release:
66
uses: datajoint/.github/.github/workflows/make_github_release.yaml@main
7-
pypi_release:
8-
needs: make_github_release
9-
uses: datajoint/.github/.github/workflows/pypi_release.yaml@main
10-
secrets:
11-
TWINE_USERNAME: ${{secrets.TWINE_USERNAME}}
12-
TWINE_PASSWORD: ${{secrets.TWINE_PASSWORD}}
13-
with:
14-
UPLOAD_URL: ${{needs.make_github_release.outputs.release_upload_url}}
157
mkdocs_release:
168
uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main
179
permissions:

CHANGELOG.md

Lines changed: 12 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -3,6 +3,18 @@
33
Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and
44
[Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention.
55

6+
## [0.3.4] - 2024-03-22
7+
8+
+ Add - pytest
9+
+ Update - Ephys schema changed from `ephys_acute` to `ephys_no_curation` in `tutorial.ipynb`
10+
11+
12+
## [0.3.3] - 2024-01-24
13+
+ Update - remove PyPI release from `release.yml` since it will fail after the new `setup.py`
14+
15+
## [0.3.2] - 2024-01-12
16+
+ Fix - `probe_geometry` bugfix for incorrect handling of probes with staggered electrode positions
17+
618
## [0.3.1] - 2023-11-28
719
+ Update - Flowchart borders for consistency with other DataJoint Elements
820
+ Fix - `dj.config()` setup moved to `tutorial_pipeline.py` instead of `__init__.py`

element_array_ephys/ephys_acute.py

Lines changed: 12 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -1044,9 +1044,11 @@ def make(self, key):
10441044
spike_time_key = (
10451045
"spike_times_sec_adj"
10461046
if "spike_times_sec_adj" in kilosort_dataset.data
1047-
else "spike_times_sec"
1048-
if "spike_times_sec" in kilosort_dataset.data
1049-
else "spike_times"
1047+
else (
1048+
"spike_times_sec"
1049+
if "spike_times_sec" in kilosort_dataset.data
1050+
else "spike_times"
1051+
)
10501052
)
10511053
spike_times = kilosort_dataset.data[spike_time_key]
10521054
kilosort_dataset.extract_spike_depths()
@@ -1081,11 +1083,13 @@ def make(self, key):
10811083
"spike_sites": spike_sites[
10821084
kilosort_dataset.data["spike_clusters"] == unit
10831085
],
1084-
"spike_depths": spike_depths[
1085-
kilosort_dataset.data["spike_clusters"] == unit
1086-
]
1087-
if spike_depths is not None
1088-
else None,
1086+
"spike_depths": (
1087+
spike_depths[
1088+
kilosort_dataset.data["spike_clusters"] == unit
1089+
]
1090+
if spike_depths is not None
1091+
else None
1092+
),
10891093
}
10901094
)
10911095

element_array_ephys/ephys_chronic.py

Lines changed: 12 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -973,9 +973,11 @@ def make(self, key):
973973
spike_time_key = (
974974
"spike_times_sec_adj"
975975
if "spike_times_sec_adj" in kilosort_dataset.data
976-
else "spike_times_sec"
977-
if "spike_times_sec" in kilosort_dataset.data
978-
else "spike_times"
976+
else (
977+
"spike_times_sec"
978+
if "spike_times_sec" in kilosort_dataset.data
979+
else "spike_times"
980+
)
979981
)
980982
spike_times = kilosort_dataset.data[spike_time_key]
981983
kilosort_dataset.extract_spike_depths()
@@ -1010,11 +1012,13 @@ def make(self, key):
10101012
"spike_sites": spike_sites[
10111013
kilosort_dataset.data["spike_clusters"] == unit
10121014
],
1013-
"spike_depths": spike_depths[
1014-
kilosort_dataset.data["spike_clusters"] == unit
1015-
]
1016-
if spike_depths is not None
1017-
else None,
1015+
"spike_depths": (
1016+
spike_depths[
1017+
kilosort_dataset.data["spike_clusters"] == unit
1018+
]
1019+
if spike_depths is not None
1020+
else None
1021+
),
10181022
}
10191023
)
10201024

element_array_ephys/ephys_no_curation.py

Lines changed: 48 additions & 46 deletions
Original file line number | Diff line number | Diff line change
@@ -371,31 +371,30 @@ def make(self, key):
371371
probe_type, electrode_group_members
372372
)
373373

374-
self.insert1(
375-
{
376-
**key,
377-
"electrode_config_hash": econfig_entry["electrode_config_hash"],
378-
"acq_software": acq_software,
379-
"sampling_rate": spikeglx_meta.meta["imSampRate"],
380-
"recording_datetime": spikeglx_meta.recording_time,
381-
"recording_duration": (
382-
spikeglx_meta.recording_duration
383-
or spikeglx.retrieve_recording_duration(spikeglx_meta_filepath)
384-
),
385-
}
386-
)
374+
ephys_recording_entry = {
375+
**key,
376+
"electrode_config_hash": econfig_entry["electrode_config_hash"],
377+
"acq_software": acq_software,
378+
"sampling_rate": spikeglx_meta.meta["imSampRate"],
379+
"recording_datetime": spikeglx_meta.recording_time,
380+
"recording_duration": (
381+
spikeglx_meta.recording_duration
382+
or spikeglx.retrieve_recording_duration(spikeglx_meta_filepath)
383+
),
384+
}
387385

388386
root_dir = find_root_directory(
389387
get_ephys_root_data_dir(), spikeglx_meta_filepath
390388
)
391-
self.EphysFile.insert1(
389+
390+
ephys_file_entries = [
392391
{
393392
**key,
394393
"file_path": spikeglx_meta_filepath.relative_to(
395394
root_dir
396395
).as_posix(),
397396
}
398-
)
397+
]
399398

400399
# Insert channel information
401400
# Get channel and electrode-site mapping
@@ -417,13 +416,11 @@ def make(self, key):
417416
spikeglx_meta.shankmap["data"]
418417
)
419418
}
420-
self.Channel.insert(
421-
[
422-
{**key, "channel_idx": channel_idx, **channel_info}
423-
for channel_idx, channel_info in channel2electrode_map.items()
424-
]
425-
)
426419

420+
ephys_channel_entries = [
421+
{**key, "channel_idx": channel_idx, **channel_info}
422+
for channel_idx, channel_info in channel2electrode_map.items()
423+
]
427424
elif acq_software == "Open Ephys":
428425
dataset = openephys.OpenEphys(session_dir)
429426
for serial_number, probe_data in dataset.probes.items():
@@ -460,31 +457,29 @@ def make(self, key):
460457
probe_type, electrode_group_members
461458
)
462459

463-
self.insert1(
464-
{
465-
**key,
466-
"electrode_config_hash": econfig_entry["electrode_config_hash"],
467-
"acq_software": acq_software,
468-
"sampling_rate": probe_data.ap_meta["sample_rate"],
469-
"recording_datetime": probe_data.recording_info[
470-
"recording_datetimes"
471-
][0],
472-
"recording_duration": np.sum(
473-
probe_data.recording_info["recording_durations"]
474-
),
475-
}
476-
)
460+
ephys_recording_entry = {
461+
**key,
462+
"electrode_config_hash": econfig_entry["electrode_config_hash"],
463+
"acq_software": acq_software,
464+
"sampling_rate": probe_data.ap_meta["sample_rate"],
465+
"recording_datetime": probe_data.recording_info["recording_datetimes"][
466+
0
467+
],
468+
"recording_duration": np.sum(
469+
probe_data.recording_info["recording_durations"]
470+
),
471+
}
477472

478473
root_dir = find_root_directory(
479474
get_ephys_root_data_dir(),
480475
probe_data.recording_info["recording_files"][0],
481476
)
482-
self.EphysFile.insert(
483-
[
484-
{**key, "file_path": fp.relative_to(root_dir).as_posix()}
485-
for fp in probe_data.recording_info["recording_files"]
486-
]
487-
)
477+
478+
ephys_file_entries = [
479+
{**key, "file_path": fp.relative_to(root_dir).as_posix()}
480+
for fp in probe_data.recording_info["recording_files"]
481+
]
482+
488483
# Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough
489484
del probe_data, dataset
490485
gc.collect()
@@ -503,11 +498,14 @@ def make(self, key):
503498
channel_idx: probe_electrodes[channel_idx]
504499
for channel_idx in probe_dataset.ap_meta["channels_indices"]
505500
}
506-
self.Channel.insert(
507-
[
508-
{**key, "channel_idx": channel_idx, **channel_info}
509-
for channel_idx, channel_info in channel2electrode_map.items()
510-
]
501+
502+
ephys_channel_entries = [
503+
{**key, "channel_idx": channel_idx, **channel_info}
504+
for channel_idx, channel_info in channel2electrode_map.items()
505+
]
506+
else:
507+
raise NotImplementedError(
508+
f"Processing ephys files from acquisition software of type {acq_software} is not yet implemented."
511509
)
512510

513511
# Insert into probe.ElectrodeConfig (recording configuration)
@@ -517,6 +515,10 @@ def make(self, key):
517515
probe.ElectrodeConfig.insert1(econfig_entry)
518516
probe.ElectrodeConfig.Electrode.insert(econfig_electrodes)
519517

518+
self.insert1(ephys_recording_entry)
519+
self.EphysFile.insert(ephys_file_entries)
520+
self.Channel.insert(ephys_channel_entries)
521+
520522

521523
@schema
522524
class LFP(dj.Imported):

element_array_ephys/ephys_precluster.py

Lines changed: 12 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -962,9 +962,11 @@ def make(self, key):
962962
spike_time_key = (
963963
"spike_times_sec_adj"
964964
if "spike_times_sec_adj" in kilosort_dataset.data
965-
else "spike_times_sec"
966-
if "spike_times_sec" in kilosort_dataset.data
967-
else "spike_times"
965+
else (
966+
"spike_times_sec"
967+
if "spike_times_sec" in kilosort_dataset.data
968+
else "spike_times"
969+
)
968970
)
969971
spike_times = kilosort_dataset.data[spike_time_key]
970972
kilosort_dataset.extract_spike_depths()
@@ -999,11 +1001,13 @@ def make(self, key):
9991001
"spike_sites": spike_sites[
10001002
kilosort_dataset.data["spike_clusters"] == unit
10011003
],
1002-
"spike_depths": spike_depths[
1003-
kilosort_dataset.data["spike_clusters"] == unit
1004-
]
1005-
if spike_depths is not None
1006-
else None,
1004+
"spike_depths": (
1005+
spike_depths[
1006+
kilosort_dataset.data["spike_clusters"] == unit
1007+
]
1008+
if spike_depths is not None
1009+
else None
1010+
),
10071011
}
10081012
)
10091013

element_array_ephys/ephys_report.py

Lines changed: 5 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -75,9 +75,11 @@ def make(self, key):
7575
fig_prefix = (
7676
"-".join(
7777
[
78-
v.strftime("%Y%m%d%H%M%S")
79-
if isinstance(v, datetime.datetime)
80-
else str(v)
78+
(
79+
v.strftime("%Y%m%d%H%M%S")
80+
if isinstance(v, datetime.datetime)
81+
else str(v)
82+
)
8183
for v in key.values()
8284
]
8385
)

element_array_ephys/probe.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,7 @@
11
"""
22
Neuropixels Probes
33
"""
4+
45
import datajoint as dj
56

67
from .readers import probe_geometry

element_array_ephys/readers/probe_geometry.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -200,7 +200,7 @@ def build_electrode_layouts(
200200
row_offset = np.zeros_like(x_coords)
201201
else:
202202
assert len(row_offset) == row_count
203-
row_offset = np.tile(row_offset, col_count_per_shank)
203+
row_offset = np.repeat(row_offset, col_count_per_shank)
204204
x_coords = x_coords + row_offset
205205

206206
shank_cols = np.tile(range(col_count_per_shank), row_count)

element_array_ephys/spike_sorting/si_spike_sorting.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -153,7 +153,7 @@ def make(self, key):
153153
# Run preprocessing and save results to output folder
154154
si_preproc_func = getattr(si_preprocessing, params["SI_PREPROCESSING_METHOD"])
155155
si_recording = si_preproc_func(si_recording)
156-
si_recording.dump_to_pickle(file_path=recording_file)
156+
si_recording.dump_to_pickle(file_path=recording_file, relative_to=output_dir)
157157

158158
self.insert1(
159159
{
@@ -206,7 +206,7 @@ def make(self, key):
206206
sorting_save_path = (
207207
output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl"
208208
)
209-
si_sorting.dump_to_pickle(sorting_save_path)
209+
si_sorting.dump_to_pickle(sorting_save_path, relative_to=output_dir)
210210

211211
self.insert1(
212212
{

0 commit comments

Comments (0)