Skip to content

Commit 06a3f70

Browse files
committed
format: black formatting
1 parent 3666cda commit 06a3f70

File tree

2 files changed

+19
-10
lines changed

2 files changed

+19
-10
lines changed

element_array_ephys/ephys_no_curation.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1256,7 +1256,9 @@ def make(self, key):
12561256
unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()}
12571257

12581258
# reorder channel2electrode_map according to recording channel ids
1259-
channel_indices = sorting_analyzer.channel_ids_to_indices(sorting_analyzer.channel_ids).tolist()
1259+
channel_indices = sorting_analyzer.channel_ids_to_indices(
1260+
sorting_analyzer.channel_ids
1261+
).tolist()
12601262
channel2electrode_map = {
12611263
chn_idx: channel2electrode_map[chn_idx] for chn_idx in channel_indices
12621264
}
@@ -1500,7 +1502,9 @@ def make(self, key):
15001502
if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs
15011503
sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir)
15021504
qc_metrics = sorting_analyzer.get_extension("quality_metrics").get_data()
1503-
template_metrics = sorting_analyzer.get_extension("template_metrics").get_data()
1505+
template_metrics = sorting_analyzer.get_extension(
1506+
"template_metrics"
1507+
).get_data()
15041508
metrics_df = pd.concat([qc_metrics, template_metrics], axis=1)
15051509

15061510
metrics_df.rename(
@@ -1514,7 +1518,7 @@ def make(self, key):
15141518
"drift_mad": "cumulative_drift",
15151519
"half_width": "halfwidth",
15161520
"peak_trough_ratio": "pt_ratio",
1517-
"peak_to_valley": "duration"
1521+
"peak_to_valley": "duration",
15181522
},
15191523
inplace=True,
15201524
)

element_array_ephys/spike_sorting/si_spike_sorting.py

Lines changed: 12 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -270,28 +270,33 @@ def make(self, key):
270270
overwrite=True,
271271
)
272272

273-
job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get("job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"})
273+
job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get(
274+
"job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"}
275+
)
274276
extensions_params = params["SI_POSTPROCESSING_PARAMS"].get("extensions", {})
275277
# The order of extension computation is drawn from sorting_analyzer.get_computable_extensions()
276278
# each extension is parameterized by params specified in extensions_params dictionary (skip if not specified)
277-
extensions_to_compute = {ext_name: extensions_params[ext_name]
278-
for ext_name in sorting_analyzer.get_computable_extensions()
279-
if ext_name in extensions_params}
279+
extensions_to_compute = {
280+
ext_name: extensions_params[ext_name]
281+
for ext_name in sorting_analyzer.get_computable_extensions()
282+
if ext_name in extensions_params
283+
}
280284

281285
sorting_analyzer.compute(extensions_to_compute, **job_kwargs)
282286

283287
# Save to phy format
284288
if params["SI_POSTPROCESSING_PARAMS"].get("export_to_phy", False):
285289
si.exporters.export_to_phy(
286-
sorting_analyzer=sorting_analyzer, output_folder=output_dir / sorter_name / "phy",
287-
**job_kwargs
290+
sorting_analyzer=sorting_analyzer,
291+
output_folder=output_dir / sorter_name / "phy",
292+
**job_kwargs,
288293
)
289294
# Generate spike interface report
290295
if params["SI_POSTPROCESSING_PARAMS"].get("export_report", True):
291296
si.exporters.export_report(
292297
sorting_analyzer=sorting_analyzer,
293298
output_folder=output_dir / sorter_name / "spikeinterface_report",
294-
**job_kwargs
299+
**job_kwargs,
295300
)
296301

297302
self.insert1(

0 commit comments

Comments (0)